使用Hive JDBC执行程序时出错

我为 Hive JDBC 编写了这个小程序。最初它运行正常，但当我后来再次尝试运行时，突然遇到了下面的错误。

程序:

import java.io.FileWriter;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.List;

/**
 * Sample Hive JDBC program: starts the Hive Thrift server via a shell
 * script, then (re)builds a calendar-quarter lookup table in Hive by
 * generating a CSV file and LOAD DATA-ing it into TmpQuarterTable.
 */
public class SampleHiveProgram {

    String lyear = "";
    String lquarter = "";
    // HiveServer1 driver class; needs hive-jdbc, libthrift and libfb303 on the classpath.
    String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";

    public static void main(String[] args) {
        SampleHiveProgram s = new SampleHiveProgram();
        s.startHiveThriftServer();
        s.quarterTable();
    }

    /** Runs StartHiveThriftServer.sh and echoes its output to stdout. */
    public void startHiveThriftServer() {
        try {
            // Command executed through a Unix shell.
            String cmd = "/home/hadoop/sqoop-1.3.0-cdh3u1/bin/StartHiveThriftServer.sh";
            ProcessBuilder pb = new ProcessBuilder("bash", "-c", cmd);
            pb.redirectErrorStream(true); // merge stderr into stdout so no messages are lost
            Process shell = pb.start();
            InputStream shellIn = shell.getInputStream();
            // Drain the child's output; without this the child can block on a full pipe.
            int c;
            while ((c = shellIn.read()) != -1) {
                System.out.write(c);
            }
            // Wait for the shell to finish, then release the stream.
            shell.waitFor();
            shellIn.close();
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Drops and recreates TmpQuarterTable, writes one CSV row per calendar
     * quarter between 2010-01-01 (inclusive) and 2011-01-01 (exclusive) to
     * /home/hadoop/Quarter.txt, then loads that file into the table.
     */
    public void quarterTable() {
        try {
            String start = "2010-01-01";
            String end = "2011-01-01";
            System.out.println("in quarter table...");

            // Connect to the local HiveServer1 Thrift endpoint.
            Class.forName(driverName);
            Connection con = DriverManager.getConnection("jdbc:hive://localhost:10000/default", "", "");
            Statement stmt = con.createStatement();

            System.out.println("Dropping the Quarter Table...");
            stmt.execute("drop table TmpQuarterTable"); // DDL: use execute, not executeQuery

            System.out.println("Creating the Quarter Table...");
            stmt.execute("create table TmpQuarterTable (year string, quarter string, "
                    + "quarterstart string, quarterend string, quartername string) "
                    + "ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' "
                    + "LINES TERMINATED BY '\012' STORED AS TEXTFILE");

            // Generate one CSV row per quarter and write them to the load file.
            String filepath = "/home/hadoop/Quarter.txt";
            FileWriter fw = new FileWriter(filepath);
            try {
                for (String line : buildQuarterLines(start, end)) {
                    fw.write(line + "\n");
                }
            } finally {
                fw.close(); // close even if a write fails, so the file is not leaked
            }

            String sql = "load data local inpath '" + filepath + "' overwrite into table TmpQuarterTable";
            System.out.println("Running: " + sql);
            stmt.executeUpdate(sql);

            stmt.close();
            con.close();
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Builds one CSV line per calendar quarter in [start, end), both
     * formatted yyyy-MM-dd (end is exclusive). Each line is
     * year,quarter,quarterStart,quarterEnd,quarterName where quarterStart
     * is the first day of the quarter, quarterEnd is the first day of the
     * NEXT quarter, and quarterName looks like 2010"Q"1.
     *
     * @param start first date to cover, yyyy-MM-dd
     * @param end   exclusive upper bound, yyyy-MM-dd
     * @return CSV lines, one per quarter, in chronological order
     * @throws IllegalArgumentException if either date cannot be parsed
     */
    static List<String> buildQuarterLines(String start, String end) {
        SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
        Calendar cur = Calendar.getInstance();
        Calendar stop = Calendar.getInstance();
        try {
            cur.setTime(fmt.parse(start));
            stop.setTime(fmt.parse(end));
        } catch (ParseException e) {
            throw new IllegalArgumentException("dates must be yyyy-MM-dd: " + start + ", " + end, e);
        }
        List<String> lines = new ArrayList<String>();
        while (cur.compareTo(stop) < 0) {
            int year = cur.get(Calendar.YEAR);
            int month0 = cur.get(Calendar.MONTH);   // Calendar.MONTH is 0-based (Jan == 0)
            int q = month0 / 3 + 1;                 // quarter number 1..4
            int firstMonth = (q - 1) * 3 + 1;       // 1-based first month of the quarter
            String quarterStart = year + "-" + pad2(firstMonth) + "-01";
            // Quarter end is the first day of the following quarter; Q4 rolls into next year.
            String quarterEnd = (q == 4)
                    ? (year + 1) + "-01-01"
                    : year + "-" + pad2(firstMonth + 3) + "-01";
            String quarterName = year + "\"Q\"" + q;
            lines.add(year + "," + q + "," + quarterStart + "," + quarterEnd + "," + quarterName);
            cur.add(Calendar.MONTH, 3); // advance to the next quarter
        }
        return lines;
    }

    /** Left-pads a month number to two digits, e.g. 4 -> "04". */
    private static String pad2(int m) {
        return m < 10 ? "0" + m : String.valueOf(m);
    }
}

错误摘要：线程 "main" 中抛出 java.lang.IncompatibleClassChangeError：class com.facebook.fb303.FacebookService$Client 将接口 org.apache.thrift.TServiceClient 作为超类。

错误为:

 Exception in thread "main" java.lang.IncompatibleClassChangeError: class com.facebook.fb303.FacebookService$Client has interface org.apache.thrift.TServiceClient as super class at java.lang.ClassLoader.defineClass1(Native Method) at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631) at java.lang.ClassLoader.defineClass(ClassLoader.java:615) at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141) at java.net.URLClassLoader.defineClass(URLClassLoader.java:283) at java.net.URLClassLoader.access$000(URLClassLoader.java:58) at java.net.URLClassLoader$1.run(URLClassLoader.java:197) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:190) at java.lang.ClassLoader.loadClass(ClassLoader.java:306) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301) at java.lang.ClassLoader.loadClass(ClassLoader.java:247) at java.lang.ClassLoader.defineClass1(Native Method) at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631) at java.lang.ClassLoader.defineClass(ClassLoader.java:615) at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141) at java.net.URLClassLoader.defineClass(URLClassLoader.java:283) at java.net.URLClassLoader.access$000(URLClassLoader.java:58) at java.net.URLClassLoader$1.run(URLClassLoader.java:197) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:190) at java.lang.ClassLoader.loadClass(ClassLoader.java:306) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301) at java.lang.ClassLoader.loadClass(ClassLoader.java:247) at java.lang.ClassLoader.defineClass1(Native Method) at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631) at java.lang.ClassLoader.defineClass(ClassLoader.java:615) at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141) at java.net.URLClassLoader.defineClass(URLClassLoader.java:283) at 
java.net.URLClassLoader.access$000(URLClassLoader.java:58) at java.net.URLClassLoader$1.run(URLClassLoader.java:197) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:190) at java.lang.ClassLoader.loadClass(ClassLoader.java:306) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301) at java.lang.ClassLoader.loadClass(ClassLoader.java:247) at java.lang.ClassLoader.defineClass1(Native Method) at java.lang.ClassLoader.defineClassCond(ClassLoader.java:631) at java.lang.ClassLoader.defineClass(ClassLoader.java:615) at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:141) at java.net.URLClassLoader.defineClass(URLClassLoader.java:283) at java.net.URLClassLoader.access$000(URLClassLoader.java:58) at java.net.URLClassLoader$1.run(URLClassLoader.java:197) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:190) at java.lang.ClassLoader.loadClass(ClassLoader.java:306) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301) at java.lang.ClassLoader.loadClass(ClassLoader.java:247) at org.apache.hadoop.hive.jdbc.HiveConnection.(HiveConnection.java:93) at org.apache.hadoop.hive.jdbc.HiveDriver.connect(HiveDriver.java:104) at java.sql.DriverManager.getConnection(DriverManager.java:582) at java.sql.DriverManager.getConnection(DriverManager.java:185) at SampleHiveProgram.quarterTable(SampleHiveProgram.java:64) at SampleHiveProgram.main(SampleHiveProgram.java:22) 

我哪里做错了？我已经把 Hive 需要的所有 jar 包都添加进去了。

你是否有可能混用了不同版本的 Thrift 和 libthrift？听起来这些类是用某一版本的 Thrift 生成的，但在运行时却加载了另一版本的 libthrift。

当我尝试针对 Hive 运行示例 JDBC 程序时，遇到了同样的错误。后来我把 libfb*.jar 所在的位置加入类路径，问题就解决了。我从 JDBC 执行中学到的是：它需要一大堆 jar 文件，而且必须在类路径中显式列出。我的类路径如下（最后几个 jar 对 JDBC 执行是绝对必需的）：

 [cloudera@quickstart ownjava]$ echo $CLASSPATH /etc/hadoop/conf:/usr/lib/hadoop/lib/*:/usr/lib/hadoop/.//*:/usr/lib/hadoop-hdfs/./:/usr/lib/hadoop-hdfs/lib/*:/usr/lib/hadoop-hdfs/.//*:/usr/lib/hadoop-yarn/lib/*:/usr/lib/hadoop-yarn/.//*:/usr/lib/hadoop-mapreduce/lib/*:/usr/lib/hadoop-mapreduce/.//*:/usr/lib/hive/lib/:/usr/lib/hive/lib/hive-exec.jar:/usr/lib/hive/lib/hive-exec-0.12.0-cdh5.1.0.jar:/usr/lib/hive/lib/hive-service.jar:/usr/lib/hive/lib/hive-service-0.12.0-cdh5.1.0.jar:/usr/lib/hive/lib/hive-metastore.jar:/usr/lib/hive/lib/hive-metastore-0.12.0-cdh5.1.0.jar:/home/cloudera/ownjava/:/usr/lib/hive/lib/hive-jdbc-0.12.0-cdh5.1.0.jar:/usr/lib/hive/lib/hive-jdbc.jar:/usr/lib/hive/lib/libfb303-0.9.0.jar:/usr/lib/hive/lib/jdo-api-3.0.1.jar:/usr/lib/hive/lib/antlr-runtime-3.4.jar