
I am working through an example of integrating hive-1.2.1 with hbase-0.98.19, and I am getting an error from a Hive INSERT OVERWRITE. I created the HBase-backed Hive table with the following command:

CREATE TABLE hbase_table_emp(id int, name string, role string) 
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler' WITH SERDEPROPERTIES ("hbase.columns.mapping" =  ":key,cf1:name,cf1:role") 
TBLPROPERTIES ("hbase.table.name" = "emp"); 

That created the HBase table. I then created a table 'testemp' to use for importing data into 'hbase_table_emp'. The following shows how the 'testemp' table was created and loaded:

create table testemp(id int, name string, role string) row format delimited fields terminated by '\t'; 
load data local inpath '/home/hduser/sample.txt' into table testemp; 
select * from testemp; 

Everything works fine up to this point. But when I run the command insert overwrite table hbase_table_emp select * from testemp; I get the following error:

hive> insert overwrite table hbase_table_emp select * from testemp;
Query ID = hduser_20160613131557_ddef0b47-a773-477b-94d2-5cc070eb0de6
Total jobs = 1
Launching Job 1 out of 1
Number of reduce tasks is set to 0 since there's no reduce operator
java.io.IOException: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.IllegalArgumentException: Must specify table name
    at org.apache.hadoop.hive.ql.exec.FileSinkOperator.checkOutputSpecs(FileSinkOperator.java:1117)
    at org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl.checkOutputSpecs(HiveOutputFormatImpl.java:67)
    at org.apache.hadoop.mapreduce.JobSubmitter.checkSpecs(JobSubmitter.java:564)
    at org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:432)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1296)
    at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1293)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
    at org.apache.hadoop.mapreduce.Job.submit(Job.java:1293)
    at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:562)
    at org.apache.hadoop.mapred.JobClient$1.run(JobClient.java:557)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1628)
    at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:557)
    at org.apache.hadoop.mapred.JobClient.submitJob(JobClient.java:548)
    at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:431)
    at org.apache.hadoop.hive.ql.exec.mr.MapRedTask.execute(MapRedTask.java:137)
    at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:160)
    at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:88)
    at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:1653)
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1412)
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1195)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059)
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049)
    at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:213)
    at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:165)
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:376)
    at org.apache.hadoop.hive.cli.CliDriver.executeDriver(CliDriver.java:736)
    at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:681)
    at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.IllegalArgumentException: Must specify table name
    at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createHiveOutputFormat(FileSinkOperator.java:1139)
    at org.apache.hadoop.hive.ql.exec.FileSinkOperator.checkOutputSpecs(FileSinkOperator.java:1114)
    ... 37 more
Caused by: java.lang.IllegalArgumentException: Must specify table name
    at org.apache.hadoop.hbase.mapreduce.TableOutputFormat.setConf(TableOutputFormat.java:193)
    at org.apache.hive.common.util.ReflectionUtil.setConf(ReflectionUtil.java:101)
    at org.apache.hive.common.util.ReflectionUtil.newInstance(ReflectionUtil.java:87)
    at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveOutputFormat(HiveFileFormatUtils.java:277)
    at org.apache.hadoop.hive.ql.io.HiveFileFormatUtils.getHiveOutputFormat(HiveFileFormatUtils.java:267)
    at org.apache.hadoop.hive.ql.exec.FileSinkOperator.createHiveOutputFormat(FileSinkOperator.java:1137)
    ... 38 more
Job Submission failed with exception 'java.io.IOException(org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.IllegalArgumentException: Must specify table name)'
FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.mr.MapRedTask

PS: I have hbase.jar, zookeeper.jar, and guava.jar on the CLASSPATH.
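
For reference, one way to make those jars visible to the Hive session itself is Hive's ADD JAR command; this is only a sketch, and the paths and versions below are placeholders, not my actual file locations:

-- Placeholder paths/versions for illustration only.
ADD JAR /usr/local/hbase/lib/hbase-client-0.98.19-hadoop2.jar;
ADD JAR /usr/local/hbase/lib/hbase-server-0.98.19-hadoop2.jar;
ADD JAR /usr/local/hbase/lib/zookeeper-3.4.6.jar;
ADD JAR /usr/local/hbase/lib/guava-12.0.1.jar;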

Thanks in advance.


I am facing the same issue. Were you able to solve it? –

Answer


For Hive HBase integration, you also need to specify hbase.mapred.output.outputtable in TBLPROPERTIES in order to insert data into the HBase table.

Hive HBase Integration

The hbase.mapred.output.outputtable property is optional; it's needed if you plan to insert data to the table (the property is used by hbase.mapreduce.TableOutputFormat)

So for your table you need to run the following:

ALTER TABLE hbase_table_emp SET TBLPROPERTIES ("hbase.mapred.output.outputtable" = "emp"); 
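
Equivalently, if you are recreating the table rather than altering it, the property can be included in the DDL from the start (a sketch based on your original CREATE TABLE, with only the extra property added):

CREATE TABLE hbase_table_emp(id int, name string, role string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key,cf1:name,cf1:role")
TBLPROPERTIES ("hbase.table.name" = "emp",
               "hbase.mapred.output.outputtable" = "emp");

With the output table set, insert overwrite table hbase_table_emp select * from testemp; should be able to resolve the target HBase table instead of failing with "Must specify table name".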