Important: when developing Hive programs over JDBC, you must first start Hive's remote service interface. Start it with the following command: hive --service hiveserver &
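Before running the full client below, it can help to confirm that the remote service is actually listening. The following is a minimal connectivity probe, assuming the same driver class, URL, user, and password used in the example later in this article; adjust them to your environment.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;

// Minimal sketch: open and immediately close a JDBC connection to confirm
// the Hive remote service is reachable. The URL, user, and password below
// are taken from the example in this article and are assumptions for your setup.
public class HiveConnectionCheck {
    public static void main(String[] args) throws ClassNotFoundException {
        Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive://192.168.11.157:10000/default", "hive", "mysql")) {
            System.out.println("Hive remote service is reachable.");
        } catch (SQLException e) {
            System.err.println("Hive remote service is not reachable: " + e.getMessage());
        }
    }
}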
1). Test data
Contents of userinfo.txt (the fields on each line are separated by a tab):
1	xiapi
2	xiaoxue
3	qingqing
2). Program code
package com.ljq.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.log4j.Logger;

public class HiveJdbcClient {
    private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive://192.168.11.157:10000/default";
    private static String user = "hive";
    private static String password = "mysql";
    private static String sql = "";
    private static ResultSet res;
    private static final Logger log = Logger.getLogger(HiveJdbcClient.class);

    public static void main(String[] args) {
        try {
            // Load the Hive JDBC driver and open a connection
            Class.forName(driverName);
            Connection conn = DriverManager.getConnection(url, user, password);
            Statement stmt = conn.createStatement();
            // Name of the table used by this example
            String tableName = "testHiveDriverTable";
            // Step 1: drop the table if it already exists
            sql = "drop table " + tableName;
            stmt.executeQuery(sql);
            // Step 2: create the table
            sql = "create table " + tableName
                    + " (key int, value string) row format delimited fields terminated by '\t'";
            stmt.executeQuery(sql);
            // Run "show tables"
            sql = "show tables '" + tableName + "'";
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            System.out.println("Result of \"show tables\":");
            if (res.next()) {
                System.out.println(res.getString(1));
            }
            // Run "describe table"
            sql = "describe " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            System.out.println("Result of \"describe table\":");
            while (res.next()) {
                System.out.println(res.getString(1) + "\t" + res.getString(2));
            }
            // Run "load data into table"
            String filepath = "/home/hadoop/ziliao/userinfo.txt";
            sql = "load data local inpath '" + filepath + "' into table " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            // Run "select * query"
            sql = "select * from " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            System.out.println("Result of \"select * query\":");
            while (res.next()) {
                System.out.println(res.getInt(1) + "\t" + res.getString(2));
            }
            // Run "regular hive query"
            sql = "select count(1) from " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            System.out.println("Result of \"regular hive query\":");
            while (res.next()) {
                System.out.println(res.getString(1));
            }
            conn.close();
            conn = null;
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            log.error(driverName + " not found!", e);
            System.exit(1);
        } catch (SQLException e) {
            e.printStackTrace();
            log.error("Connection error!", e);
            System.exit(1);
        }
    }
}
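The driver class and URL above target the original HiveServer. On newer Hive releases that ship HiveServer2, the same JDBC pattern applies, but the driver class and URL scheme differ. The sketch below shows only the changed connection code; the host, port, and credentials are assumptions to adapt to your cluster.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Sketch of the equivalent connection against HiveServer2. Only the driver
// class and the jdbc:hive2:// URL scheme differ from the example above;
// host, port, and credentials are placeholders for your own cluster.
public class HiveServer2Client {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection conn = DriverManager.getConnection(
                "jdbc:hive2://192.168.11.157:10000/default", "hive", "");
        Statement stmt = conn.createStatement();
        ResultSet res = stmt.executeQuery("show tables");
        while (res.next()) {
            System.out.println(res.getString(1));
        }
        res.close();
        stmt.close();
        conn.close();
    }
}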
3). Run results (right-click --> Run As --> Run on Hadoop)
Running: show tables 'testHiveDriverTable'
Result of "show tables":
testhivedrivertable
Running: describe testHiveDriverTable
Result of "describe table":
key	int
value	string
Running: load data local inpath '/home/hadoop/ziliao/userinfo.txt' into table testHiveDriverTable
Running: select * from testHiveDriverTable
Result of "select * query":
1	xiapi
2	xiaoxue
3	qingqing
Running: select count(1) from testHiveDriverTable
Result of "regular hive query":
3