hiveserver2 jdbc
Posted on 2018-01-28 | Categorized in hive

```java
import org.apache.log4j.Logger;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.*;

public class HiveJdbcCli {
    // Many online examples use org.apache.hadoop.hive.jdbc.HiveDriver; newer Hive versions require this class name instead
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";

    // Note the scheme is jdbc:hive2; the jdbc:hive scheme seen in older examples fails on newer versions
    // private static String url = "jdbc:hive2://master:10000/default";
    private static String url = "jdbc:hive2://xxxxx/intercarhive;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=datahiveserver2_zk";
    private static String user = "xxxxx";
    private static String password = "xxxxxx";
    private static String sql = "";
    private static ResultSet res;
    private static final Logger log = Logger.getLogger(HiveJdbcCli.class);

    public static void main(String[] args) {
        Connection conn = null;
        Statement stmt = null;
        try {
            conn = getConn();
            stmt = conn.createStatement();

            // Step 1: drop the table if it already exists
            String tableName = dropTable(stmt);
            stmt.close();
            stmt = conn.createStatement();

            // Step 2: create the table
            createTable(stmt, tableName);
            stmt.close();
            stmt = conn.createStatement();

            // Step 3: show the newly created table
            showTables(stmt, tableName);
            stmt.close();
            stmt = conn.createStatement();

            // Run describe table
            describeTables(stmt, tableName);
            stmt.close();
            stmt = conn.createStatement();

            // Dump a query result to a local file
            selectData2File(stmt, "pf_test.hdfs_fsimage_file_orc", "/d/test/fsimage.txt");
            stmt.close();
            stmt = conn.createStatement();

//            // Run load data into table
//            loadData(stmt, tableName);
//
//            // Run a select * query
//            selectData(stmt, tableName);
//
//            // Run a regular Hive aggregation query
//            countData(stmt, tableName);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            log.error(driverName + " not found!", e);
            System.exit(1);
        } catch (SQLException e) {
            e.printStackTrace();
            log.error("Connection error!", e);
            System.exit(1);
        } finally {
            try {
                // Close the Statement before the Connection
                if (stmt != null) {
                    stmt.close();
                    stmt = null;
                }
                if (conn != null) {
                    conn.close();
                    conn = null;
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    private static void countData(Statement stmt, String tableName) throws SQLException {
        sql = "select count(1) from " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of the regular hive query:");
        while (res.next()) {
            System.out.println("count ------>" + res.getString(1));
        }
    }

    private static void selectData(Statement stmt, String tableName) throws SQLException {
        sql = "select * from " + tableName + " limit 100";
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of the select * query:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    private static void selectData2File(Statement stmt, String tableName, String fileName) {
        sql = "select * from " + tableName + " limit 10000000";
        System.out.println("Running:" + sql);
        try {
            res = stmt.executeQuery(sql);
            System.out.println("Result of the select * query:");
            BufferedWriter bw = new BufferedWriter(new FileWriter(fileName));
            ResultSetMetaData rsmd = res.getMetaData();
            int columnsNumber = rsmd.getColumnCount();
            while (res.next()) {
//                System.out.println(res.getString(1) + "\t" + res.getString(2));
                // Write every column, tab-separated, one row per line
                for (int i = 1; i <= columnsNumber; i++) {
                    bw.write(res.getString(i) + "\t");
                }
                bw.write("\n");
            }
            bw.close();
        } catch (SQLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private static void loadData(Statement stmt, String tableName) throws SQLException {
        // Path to the data file; here it sits in the home directory of the machine where Hive is installed
        String filepath = "user.txt";
        sql = "load data local inpath '" + filepath + "' into table " + tableName;
        System.out.println("Running:" + sql);
        stmt.execute(sql);
    }

    private static void describeTables(Statement stmt, String tableName) throws SQLException {
        sql = "describe " + tableName;
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of describe table:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    private static void showTables(Statement stmt, String tableName) throws SQLException {
        sql = "show tables '" + tableName + "'";
        System.out.println("Running:" + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of show tables:");
        if (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    private static void createTable(Statement stmt, String tableName) throws SQLException {
        sql = "create table " + tableName
                + " (key int, value string) row format delimited fields terminated by '\t'";
        stmt.execute(sql);
    }

    private static String dropTable(Statement stmt) throws SQLException {
        // Name of the table to create
        String tableName = "testHive";
        sql = "drop table if exists " + tableName;
        stmt.execute(sql);
        return tableName;
    }

    private static Connection getConn() throws ClassNotFoundException, SQLException {
        Class.forName(driverName);
        Connection conn = DriverManager.getConnection(url, user, password);
        return conn;
    }
}
```
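For reference, the same connect-query-close flow can be written more compactly. With a JDBC 4.0+ driver jar the `Class.forName` call is usually optional, and try-with-resources closes the `ResultSet`, `Statement`, and `Connection` in the correct reverse order automatically. This is a minimal sketch; the host, database, credentials, and query below are placeholders, not values from the cluster above:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcQuickQuery {
    public static void main(String[] args) throws Exception {
        // Placeholder URL; substitute your HiveServer2 host or ZooKeeper discovery string
        String url = "jdbc:hive2://hs2-host:10000/default";
        // Optional with JDBC 4.0+ driver jars; kept for compatibility with older ones
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // try-with-resources closes ResultSet, Statement, and Connection in reverse order
        try (Connection conn = DriverManager.getConnection(url, "user", "password");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}
```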
Maven dependencies:

```xml
<dependencies>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>1.2.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.2.0</version>
    </dependency>
    <dependency>
        <groupId>mysql</groupId>
        <artifactId>mysql-connector-java</artifactId>
        <version>5.1.45</version>
    </dependency>
</dependencies>
```
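A note on the connection string: the class above connects through HiveServer2's ZooKeeper service discovery rather than a direct host:port. As a general pattern (the quorum hosts and namespace below are placeholders; the namespace must match the server's `hive.server2.zookeeper.namespace` setting):

```java
// Direct connection to a single HiveServer2 instance:
String direct = "jdbc:hive2://hs2-host:10000/default";

// ZooKeeper service discovery (HA): list the ZooKeeper quorum, not the HS2 hosts;
// zooKeeperNamespace must match hive.server2.zookeeper.namespace on the server
String viaZk = "jdbc:hive2://zk1:2181,zk2:2181,zk3:2181/default;"
        + "serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2";
```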