
I tried to connect to Hive from Scala through Spark SQL and select some test data, but it failed. I cannot connect to Hive via Scala with Spark SQL.

import org.apache.spark.SparkContext 
import org.apache.spark.sql.hive.HiveContext 

object LoadHive { 
  def main(args: Array[String]) { 
    if (args.length < 2) { 
      println("Usage: [sparkmaster] [tablename]") 
      sys.exit(1) 
    } 
    val master = args(0) 
    val tableName = args(1) 
    val sc = new SparkContext(master, "LoadHive", System.getenv("SPARK_HOME")) 
    val hiveCtx = new HiveContext(sc) 
    // Query the sample Hive table and pull the integer key column back to the driver 
    val input = hiveCtx.sql("FROM src SELECT key, value") 
    val data = input.map(_.getInt(0)) 
    println(data.collect().toList) 
  } 
} 

That is the code I am using; the exception log looks like this:

16/11/28 07:25:34 WARN Hive: Failed to access metastore. This class should not accessed in runtime. 
org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient 
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1236) 
    at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174) 
    at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166) 
    at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503) 
    at org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:204) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526) 
    at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:249) 
    at org.apache.spark.sql.hive.HiveContext.metadataHive$lzycompute(HiveContext.scala:327) 
    at org.apache.spark.sql.hive.HiveContext.metadataHive(HiveContext.scala:237) 
    at org.apache.spark.sql.hive.HiveContext.setConf(HiveContext.scala:441) 
    at org.apache.spark.sql.hive.HiveContext.defaultOverrides(HiveContext.scala:226) 
    at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:229) 
    at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:101) 
    at com.oreilly.learningsparkexamples.scala.LoadHive$.main(LoadHive.scala:19) 
    at com.oreilly.learningsparkexamples.scala.LoadHive.main(LoadHive.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:606) 
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731) 
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181) 
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206) 
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121) 
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) 
Caused by: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient 
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1523) 
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86) 
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132) 
    at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104) 
    at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005) 
    at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024) 
    at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234) 
    ... 26 more 
Caused by: java.lang.reflect.InvocationTargetException 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526) 
    at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521) 
    ... 32 more 
Caused by: java.lang.NoSuchMethodError: org.apache.thrift.EncodingUtils.setBit(BIZ)B 
    at org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo.setCreateTimeIsSet(PrivilegeGrantInfo.java:245) 
    at org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo.<init>(PrivilegeGrantInfo.java:163) 
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles_core(HiveMetaStore.java:675) 
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultRoles(HiveMetaStore.java:645) 
    at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:462) 
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66) 
    at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72) 
    at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762) 
    at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199) 
    at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74) 
    ... 37 more 

I am very confused by this. Has anyone else run into this problem? Do I need to build Spark with Hive support to solve it? I thought Spark already supports Hive. The Spark version I am using is 1.6.1. Any help is appreciated!

Answer


To verify that Spark is working with Hive, run 

./bin/spark-sql --master local[*] 

and execute any SQL query (for example SELECT COUNT(1) FROM src;). 

Note that Spark must be built with Hive support and that hive-site.xml must be present on the classpath.
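
The same check can be done from application code rather than the spark-sql shell. A minimal sketch, assuming Spark 1.6.x built with Hive support, hive-site.xml on the classpath, and the example src table from the question:

import org.apache.spark.{SparkConf, SparkContext} 
import org.apache.spark.sql.hive.HiveContext 

object VerifyHive { 
  def main(args: Array[String]): Unit = { 
    val sc = new SparkContext(new SparkConf().setAppName("VerifyHive").setMaster("local[*]")) 
    val hiveCtx = new HiveContext(sc) 
    // Equivalent of running SELECT COUNT(1) FROM src; in the spark-sql shell 
    hiveCtx.sql("SELECT COUNT(1) FROM src").show() 
    sc.stop() 
  } 
} 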

+0

I have already solved this problem. It was caused by multiple Thrift versions on the classpath. Thanks anyway. – Coinnigh
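
For reference, such a conflict typically appears when another dependency drags in its own copy of libthrift next to the one Spark already ships. A sketch of excluding it in an sbt build, assuming the project uses sbt; the hive-jdbc dependency here is only a hypothetical example of the offending artifact, which the question does not show:

// build.sbt sketch: rely on Spark's own Thrift and drop the transitive copy 
libraryDependencies ++= Seq( 
  "org.apache.spark" %% "spark-hive" % "1.6.1" % "provided", 
  ("org.apache.hive" % "hive-jdbc" % "1.2.1") 
    .exclude("org.apache.thrift", "libthrift") 
) 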
