
I am using Spark 2.1.1, with MySQL as the backend for the metastore. When I start spark-shell, it fails with "Exception thrown obtaining schema column information from datastore".

This is the exception I get:

17/10/17 21:02:58 WARN Query: Query for candidates of org.apache.hadoop.hive.metastore.model.MTableColumnStatistics and subclasses resulted in no possible candidates 
Exception thrown obtaining schema column information from datastore 
org.datanucleus.exceptions.NucleusDataStoreException: Exception thrown obtaining schema column information from datastore 
    at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.refreshTableData(RDBMSSchemaHandler.java:986) 
    at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:821) 
    at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getRDBMSTableInfoForTable(RDBMSSchemaHandler.java:770) 
    at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.getSchemaData(RDBMSSchemaHandler.java:206) 
    at org.datanucleus.store.rdbms.RDBMSStoreManager.getColumnInfoForTable(RDBMSStoreManager.java:2378) 
    at org.datanucleus.store.rdbms.table.TableImpl.validateColumns(TableImpl.java:215) 
    at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.performTablesValidation(RDBMSStoreManager.java:3393) 
    at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.addClassTablesAndValidate(RDBMSStoreManager.java:3190) 
    at org.datanucleus.store.rdbms.RDBMSStoreManager$ClassAdder.run(RDBMSStoreManager.java:2841) 
    at org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:122) 
    at org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:1605) 
    at org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:954) 
    at org.datanucleus.store.rdbms.RDBMSStoreManager.getDatastoreClass(RDBMSStoreManager.java:679) 
    at org.datanucleus.store.rdbms.query.RDBMSQueryUtils.getStatementForCandidates(RDBMSQueryUtils.java:408) 
    at org.datanucleus.store.rdbms.query.JDOQLQuery.compileQueryFull(JDOQLQuery.java:947) 
    at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:370) 
    at org.datanucleus.store.query.Query.executeQuery(Query.java:1744) 
    at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672) 
    at org.datanucleus.store.query.Query.execute(Query.java:1654) 
    at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221) 
    at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:184) 
    at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137) 
    at org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295) 
    at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258) 
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) 
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) 
    at org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57) 
    at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66) 
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593) 
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571) 
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620) 
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461) 
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) 
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) 
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) 
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) 
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) 
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) 
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) 
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) 
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) 
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) 
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) 
    at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:188) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:264) 
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:358) 
    at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:262) 
    at org.apache.spark.sql.hive.HiveExternalCatalog.<init>(HiveExternalCatalog.scala:66) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
    at org.apache.spark.sql.internal.SharedState$.org$apache$spark$sql$internal$SharedState$$reflect(SharedState.scala:166) 
    at org.apache.spark.sql.internal.SharedState.<init>(SharedState.scala:86) 
    at org.apache.spark.sql.SparkSession$$anonfun$sharedState$1.apply(SparkSession.scala:101) 
    at org.apache.spark.sql.SparkSession$$anonfun$sharedState$1.apply(SparkSession.scala:101) 
    at scala.Option.getOrElse(Option.scala:121) 
    at org.apache.spark.sql.SparkSession.sharedState$lzycompute(SparkSession.scala:101) 
    at org.apache.spark.sql.SparkSession.sharedState(SparkSession.scala:100) 
    at org.apache.spark.sql.internal.SessionState.<init>(SessionState.scala:157) 
    at org.apache.spark.sql.hive.HiveSessionState.<init>(HiveSessionState.scala:32) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
    at org.apache.spark.sql.SparkSession$.org$apache$spark$sql$SparkSession$$reflect(SparkSession.scala:978) 
    at org.apache.spark.sql.SparkSession.sessionState$lzycompute(SparkSession.scala:110) 
    at org.apache.spark.sql.SparkSession.sessionState(SparkSession.scala:109) 
    at org.apache.spark.sql.SparkSession$Builder$$anonfun$getOrCreate$5.apply(SparkSession.scala:878) 
    at org.apache.spark.sql.SparkSession$Builder$$anonfun$getOrCreate$5.apply(SparkSession.scala:878) 
    at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99) 
    at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99) 
    at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230) 
    at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40) 
    at scala.collection.mutable.HashMap.foreach(HashMap.scala:99) 
    at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:878) 
    at org.apache.spark.repl.Main$.createSparkSession(Main.scala:96) 
    at $line3.$read$$iw$$iw.<init>(<console>:15) 
    at $line3.$read$$iw.<init>(<console>:42) 
    at $line3.$read.<init>(<console>:44) 
    at $line3.$read$.<init>(<console>:48) 
    at $line3.$read$.<clinit>(<console>) 
    at $line3.$eval$.$print$lzycompute(<console>:7) 
    at $line3.$eval$.$print(<console>:6) 
    at $line3.$eval.$print(<console>) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:498) 
    at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786) 
    at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047) 
    at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638) 
    at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637) 
    at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31) 
    at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19) 
    at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637) 
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569) 
    at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565) 
    at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807) 
    at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681) 
    at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395) 
    at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV$sp(SparkILoop.scala:38) 
    at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37) 
    at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37) 
    at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:214) 
    at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:37) 
    at org.apache.spark.repl.SparkILoop.loadFiles(SparkILoop.scala:105) 
    at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:920) 
    at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909) 
    at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909) 
    at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97) 
    at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909) 
    at org.apache.spark.repl.Main$.doMain(Main.scala:69) 
    at org.apache.spark.repl.Main$.main(Main.scala:52) 
    at org.apache.spark.repl.Main.main(Main.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:498) 
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:743) 
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187) 
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212) 
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126) 
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) 
Caused by: java.sql.SQLException: Column name pattern can not be NULL or empty. 
    at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:545) 
    at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:513) 
    at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:505) 
    at com.mysql.cj.jdbc.exceptions.SQLError.createSQLException(SQLError.java:479) 
    at com.mysql.cj.jdbc.DatabaseMetaData.getColumns(DatabaseMetaData.java:2074) 
    at org.datanucleus.store.rdbms.adapter.BaseDatastoreAdapter.getColumns(BaseDatastoreAdapter.java:1532) 
    at org.datanucleus.store.rdbms.schema.RDBMSSchemaHandler.refreshTableData(RDBMSSchemaHandler.java:921) 
    ... 132 more 
Nested Throwables StackTrace: 
java.sql.SQLException: Column name pattern can not be NULL or empty. 
    at com.mysql.cj.jdbc.DatabaseMetaData.getColumns(DatabaseMetaData.java:2074) 
    ... (the rest of the nested trace repeats the stack trace shown above) 

How did you define the metastore in Spark? Via JDBC, in spark-defaults.conf? –


These are the JDBC settings for the metastore connection in spark-defaults.conf:

    # JDBC connect string for a JDBC metastore
    spark.hadoop.javax.jdo.option.ConnectionURL jdbc:mysql://localhost:32775/metastoredb?createDatabaseIfNotExist=true
    # Username to use against metastore database
    spark.hadoop.javax.jdo.option.ConnectionUserName root
    # Password to use against metastore database
    spark.hadoop.javax.jdo.option.ConnectionPassword
    # Driver class name for a JDBC metastore
    spark.hadoop.javax.jdo.option.ConnectionDriverName com.mysql.jdbc.Driver –


(See https://stackoverflow.com/questions/45098841/hive-metastore-creation-error-on-spark-2-1-1.) Changing the MySQL driver version also solves the problem: from 6.0.6 to 5.1.34. –

Answer


As suggested in this answer: https://stackoverflow.com/a/45105368/1437346, I changed the MySQL driver version from 6.0.6 to 5.1.34.

It works now.
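
For reference, a minimal sketch of how the downgraded driver can be supplied to spark-shell; the local jar path below is only an illustration, and --packages resolves the artifact from Maven Central:

    # Option 1: point spark-shell at a locally downloaded Connector/J 5.1.34 jar
    spark-shell --driver-class-path /path/to/mysql-connector-java-5.1.34.jar \
                --jars /path/to/mysql-connector-java-5.1.34.jar

    # Option 2: let Spark resolve the driver from Maven Central
    spark-shell --packages mysql:mysql-connector-java:5.1.34

Note that the stack trace shows com.mysql.cj.jdbc.* frames, which belong to the Connector/J 6.x package; the 5.1.x driver uses com.mysql.jdbc.Driver, matching the ConnectionDriverName set in spark-defaults.conf above.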


Thanks for answering your own question - this may help others! – MaxU
