[root@hadoop-001 spark-2.2.1-bin-hadoop2.7]# jps
4849 NodeManager
4563 SecondaryNameNode
4745 ResourceManager
5163 Jps
4429 DataNode
4301 NameNode
3406 RunJar
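Note: the RunJar process (PID 3406) is how Hive services such as the standalone metastore or HiveServer2 appear in jps. Given the metastore timeout that follows, it is worth confirming what that process actually is; a quick check, using the PID from the output above:

    ps -p 3406 -o args=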
[root@hadoop-001 spark-2.2.1-bin-hadoop2.7]# ./sbin/start-all.sh 
starting org.apache.spark.deploy.master.Master, logging to /usr/soft/myspark/spark-2.2.1-bin-hadoop2.7/logs/spark-root-org.apache.spark.deploy.master.Master-1-hadoop-001.out
hadoop-003: starting org.apache.spark.deploy.worker.Worker, logging to /usr/soft/myspark/spark-2.2.1-bin-hadoop2.7/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-hadoop-003.out
hadoop-002: starting org.apache.spark.deploy.worker.Worker, logging to /usr/soft/myspark/spark-2.2.1-bin-hadoop2.7/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-hadoop-002.out
[root@hadoop-001 spark-2.2.1-bin-hadoop2.7]# ./bin/spark-sql 
log4j:WARN No appenders could be found for logger (org.apache.hadoop.util.Shell).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
19/10/22 05:36:28 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
19/10/22 05:36:28 INFO HiveMetaStore: 0: Opening raw store with implemenation class:org.apache.hadoop.hive.metastore.ObjectStore
19/10/22 05:36:28 INFO ObjectStore: ObjectStore, initialize called
19/10/22 05:36:29 INFO Persistence: Property hive.metastore.integral.jdo.pushdown unknown - will be ignored
19/10/22 05:36:29 INFO Persistence: Property datanucleus.cache.level2 unknown - will be ignored
19/10/22 05:36:31 INFO ObjectStore: Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes="Table,StorageDescriptor,SerDeInfo,Partition,Database,Type,FieldSchema,Order"
19/10/22 05:36:33 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MFieldSchema" is tagged as "embedded-only" so does not have its own datastore table.
19/10/22 05:36:33 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MOrder" is tagged as "embedded-only" so does not have its own datastore table.
19/10/22 05:36:34 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MFieldSchema" is tagged as "embedded-only" so does not have its own datastore table.
19/10/22 05:36:34 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MOrder" is tagged as "embedded-only" so does not have its own datastore table.
19/10/22 05:36:35 INFO MetaStoreDirectSql: Using direct SQL, underlying DB is DERBY
19/10/22 05:36:35 INFO ObjectStore: Initialized ObjectStore
19/10/22 05:36:35 WARN ObjectStore: Version information not found in metastore. hive.metastore.schema.verification is not enabled so recording the schema version 1.2.0
19/10/22 05:36:35 WARN ObjectStore: Failed to get database default, returning NoSuchObjectException
19/10/22 05:36:36 INFO HiveMetaStore: Added admin role in metastore
19/10/22 05:36:36 INFO HiveMetaStore: Added public role in metastore
19/10/22 05:36:36 INFO HiveMetaStore: No user is added in admin role, since config is empty
19/10/22 05:36:36 INFO HiveMetaStore: 0: get_all_databases
19/10/22 05:36:36 INFO audit: ugi=root ip=unknown-ip-addr cmd=get_all_databases
19/10/22 05:36:36 INFO HiveMetaStore: 0: get_functions: db=default pat=*
19/10/22 05:36:36 INFO audit: ugi=root ip=unknown-ip-addr cmd=get_functions: db=default pat=*
19/10/22 05:36:36 INFO Datastore: The class "org.apache.hadoop.hive.metastore.model.MResourceUri" is tagged as "embedded-only" so does not have its own datastore table.
19/10/22 05:36:37 INFO SessionState: Created local directory: /tmp/6204f282-230d-49b3-951f-298f8063861c_resources
19/10/22 05:36:37 INFO SessionState: Created HDFS directory: /tmp/hive/root/6204f282-230d-49b3-951f-298f8063861c
19/10/22 05:36:37 INFO SessionState: Created local directory: /tmp/root/6204f282-230d-49b3-951f-298f8063861c
19/10/22 05:36:37 INFO SessionState: Created HDFS directory: /tmp/hive/root/6204f282-230d-49b3-951f-298f8063861c/_tmp_space.db
19/10/22 05:36:37 INFO SparkContext: Running Spark version 2.2.1
19/10/22 05:36:37 INFO SparkContext: Submitted application: SparkSQL::192.168.137.111
19/10/22 05:36:37 INFO SecurityManager: Changing view acls to: root
19/10/22 05:36:37 INFO SecurityManager: Changing modify acls to: root
19/10/22 05:36:37 INFO SecurityManager: Changing view acls groups to: 
19/10/22 05:36:37 INFO SecurityManager: Changing modify acls groups to: 
19/10/22 05:36:37 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users  with view permissions: Set(root); groups with view permissions: Set(); users  with modify permissions: Set(root); groups with modify permissions: Set()
19/10/22 05:36:37 INFO Utils: Successfully started service 'sparkDriver' on port 40795.
19/10/22 05:36:37 INFO SparkEnv: Registering MapOutputTracker
19/10/22 05:36:37 INFO SparkEnv: Registering BlockManagerMaster
19/10/22 05:36:37 INFO BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
19/10/22 05:36:37 INFO BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
19/10/22 05:36:37 INFO DiskBlockManager: Created local directory at /tmp/blockmgr-4b0a5963-8b9c-4e51-9cc9-55545b11e384
19/10/22 05:36:38 INFO MemoryStore: MemoryStore started with capacity 413.9 MB
19/10/22 05:36:38 INFO SparkEnv: Registering OutputCommitCoordinator
19/10/22 05:36:38 INFO Utils: Successfully started service 'SparkUI' on port 4040.
19/10/22 05:36:38 INFO SparkUI: Bound SparkUI to 0.0.0.0, and started at http://192.168.137.111:4040
19/10/22 05:36:38 INFO Executor: Starting executor ID driver on host localhost
19/10/22 05:36:38 INFO Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 43209.
19/10/22 05:36:38 INFO NettyBlockTransferService: Server created on 192.168.137.111:43209
19/10/22 05:36:38 INFO BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
19/10/22 05:36:38 INFO BlockManagerMaster: Registering BlockManager BlockManagerId(driver, 192.168.137.111, 43209, None)
19/10/22 05:36:38 INFO BlockManagerMasterEndpoint: Registering block manager 192.168.137.111:43209 with 413.9 MB RAM, BlockManagerId(driver, 192.168.137.111, 43209, None)
19/10/22 05:36:38 INFO BlockManagerMaster: Registered BlockManager BlockManagerId(driver, 192.168.137.111, 43209, None)
19/10/22 05:36:38 INFO BlockManager: Initialized BlockManager: BlockManagerId(driver, 192.168.137.111, 43209, None)
19/10/22 05:36:39 INFO SharedState: loading hive config file: file:/usr/soft/myspark/spark-2.2.1-bin-hadoop2.7/conf/hive-site.xml
19/10/22 05:36:39 INFO SharedState: Setting hive.metastore.warehouse.dir ('null') to the value of spark.sql.warehouse.dir ('file:/usr/soft/myspark/spark-2.2.1-bin-hadoop2.7/spark-warehouse').
19/10/22 05:36:39 INFO SharedState: Warehouse path is 'file:/usr/soft/myspark/spark-2.2.1-bin-hadoop2.7/spark-warehouse'.
19/10/22 05:36:39 INFO HiveUtils: Initializing HiveMetastoreConnection version 1.2.1 using Spark classes.
19/10/22 05:36:40 INFO metastore: Trying to connect to metastore with URI thrift://192.168.137.111:9083

My hive-site.xml configuration:
<configuration>
<property>
        <name>javax.jdo.option.ConnectionURL</name>
        <value>jdbc:mysql://192.168.137.111:3306/hive1812?characterEncoding=UTF-8&amp;createDatabaseIfNotExist=true</value>
</property>
<property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
</property>
<property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
</property>
<property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>123456</value>
</property>
<property>
        <name>hive.cli.print.current.db</name>
        <value>true</value>
</property>
<property>
        <name>hive.exec.mode.local.auto</name>
        <value>true</value>
</property>
<property>
        <name>hbase.zookeeper.quorum</name>
        <value>hadoop-001:2181,hadoop-002:2181,hadoop-003:2181</value>
</property>
<property>
        <name>zookeeper.znode.parent</name>
        <value>/hbase</value>
</property>
<property>
        <name>hive.metastore.schema.verification</name>
        <value>false</value>
</property>
</configuration>
spark-sql hangs here and never reaches the SQL prompt, and I can't work out why. The MySQL connector JAR has already been copied into spark/jars. Any advice would be appreciated.
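Note: the hive-site.xml shown above does not set hive.metastore.uris, yet the startup log shows Spark trying thrift://192.168.137.111:9083, and the SharedState line shows it loading file:/usr/soft/myspark/spark-2.2.1-bin-hadoop2.7/conf/hive-site.xml. Presumably the copy Spark loads contains something like the following (a sketch; the URI is taken from the log above):

    <property>
            <name>hive.metastore.uris</name>
            <value>thrift://192.168.137.111:9083</value>
    </property>

With this property set, Spark does not open the MySQL-backed metastore directly; it talks to a standalone metastore service on port 9083, so that service has to be up and healthy. The earlier "underlying DB is DERBY" line also suggests the MySQL settings shown here were not the ones in effect during the first part of the startup.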

Solutions »

  1.   

    19/10/22 05:36:39 INFO SharedState: Warehouse path is 'file:/usr/soft/myspark/spark-2.2.1-bin-hadoop2.7/spark-warehouse'.
    19/10/22 05:36:39 INFO HiveUtils: Initializing HiveMetastoreConnection version 1.2.1 using Spark classes.
    19/10/22 05:36:40 INFO metastore: Trying to connect to metastore with URI thrift://192.168.137.111:9083
    19/10/22 05:46:40 WARN metastore: set_ugi() not successful, Likely cause: new client talking to old server. Continuing without it.
    org.apache.thrift.transport.TTransportException: java.net.SocketTimeoutException: Read timed out
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:129)
    at org.apache.thrift.transport.TTransport.readAll(TTransport.java:86)
    at org.apache.thrift.protocol.TBinaryProtocol.readAll(TBinaryProtocol.java:429)
    at org.apache.thrift.protocol.TBinaryProtocol.readI32(TBinaryProtocol.java:318)
    at org.apache.thrift.protocol.TBinaryProtocol.readMessageBegin(TBinaryProtocol.java:219)
    at org.apache.thrift.TServiceClient.receiveBase(TServiceClient.java:77)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.recv_set_ugi(ThriftHiveMetastore.java:3688)
    at org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore$Client.set_ugi(ThriftHiveMetastore.java:3674)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.open(HiveMetaStoreClient.java:436)
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:236)
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:191)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:362)
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:266)
    at org.apache.spark.sql.hive.HiveExternalCatalog.client$lzycompute(HiveExternalCatalog.scala:66)
    at org.apache.spark.sql.hive.HiveExternalCatalog.client(HiveExternalCatalog.scala:65)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply$mcZ$sp(HiveExternalCatalog.scala:195)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
    at org.apache.spark.sql.hive.HiveExternalCatalog$$anonfun$databaseExists$1.apply(HiveExternalCatalog.scala:195)
    at org.apache.spark.sql.hive.HiveExternalCatalog.withClient(HiveExternalCatalog.scala:97)
    at org.apache.spark.sql.hive.HiveExternalCatalog.databaseExists(HiveExternalCatalog.scala:194)
    at org.apache.spark.sql.internal.SharedState.externalCatalog$lzycompute(SharedState.scala:105)
    at org.apache.spark.sql.internal.SharedState.externalCatalog(SharedState.scala:93)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLEnv$.init(SparkSQLEnv.scala:53)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.<init>(SparkSQLCLIDriver.scala:293)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:138)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:775)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:119)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
    Caused by: java.net.SocketTimeoutException: Read timed out
    at java.net.SocketInputStream.socketRead0(Native Method)
    at java.net.SocketInputStream.socketRead(SocketInputStream.java:116)
    at java.net.SocketInputStream.read(SocketInputStream.java:170)
    at java.net.SocketInputStream.read(SocketInputStream.java:141)
    at java.io.BufferedInputStream.fill(BufferedInputStream.java:246)
    at java.io.BufferedInputStream.read1(BufferedInputStream.java:286)
    at java.io.BufferedInputStream.read(BufferedInputStream.java:345)
    at org.apache.thrift.transport.TIOStreamTransport.read(TIOStreamTransport.java:127)
    ... 54 more
    19/10/22 05:46:40 INFO metastore: Connected to metastore.
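
What the trace shows: the timestamps jump from 05:36:40 ("Trying to connect to metastore") to 05:46:40, exactly ten minutes, which matches the default hive.metastore.client.socket.timeout (600s) in the Hive 1.2.1 client that Spark 2.2 uses. So the TCP connection to 192.168.137.111:9083 succeeds (a closed port would fail immediately with "Connection refused"), but whatever is listening there never answers the Thrift set_ugi() call. The usual causes are that the process on 9083 is not actually a Hive metastore, or that the metastore is hung, for example blocked on its MySQL backend. The trailing "Connected to metastore." line only means the client gave up on set_ugi() and carried on, as the WARN above it says; it does not mean the session is healthy.

A minimal check-and-restart sequence (a sketch; the PID, host, port, and MySQL credentials are taken from the post above, the commands are standard Hive/Linux tooling):

    # what is listening on the metastore port?
    netstat -tlnp | grep 9083

    # is the RunJar process from jps (PID 3406) actually the metastore?
    ps -p 3406 -o args=

    # (re)start the standalone metastore service and watch its log for errors
    nohup hive --service metastore > /tmp/metastore.log 2>&1 &
    tail -f /tmp/metastore.log

    # verify the MySQL backend is reachable with the configured credentials
    mysql -h 192.168.137.111 -P 3306 -uroot -p123456 -e 'use hive1812; show tables;'

Once a metastore that answers on 9083 is up, ./bin/spark-sql should get past this point.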