HiveServer2 JDBC

The example below connects to HiveServer2 through the Hive JDBC driver (here via ZooKeeper service discovery), then drops, creates, and inspects a table, and finally dumps a query result to a local file.

import org.apache.log4j.Logger;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.*;

public class HiveJdbcCli {
    // Many online examples use org.apache.hadoop.hive.jdbc.HiveDriver;
    // that class name no longer works with newer Hive versions.
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";

    // Note the jdbc:hive2 scheme; older examples use jdbc:hive, which fails
    // against newer HiveServer2 versions.
    // Direct connection: private static String url = "jdbc:hive2://master:10000/default";
    // Connection through ZooKeeper service discovery:
    private static String url = "jdbc:hive2://xxxxx/intercarhive;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=datahiveserver2_zk";
    private static String user = "xxxxx";
    private static String password = "xxxxxx";
    private static String sql = "";
    private static ResultSet res;
    private static final Logger log = Logger.getLogger(HiveJdbcCli.class);

    public static void main(String[] args) {
        Connection conn = null;
        Statement stmt = null;
        try {
            conn = getConn();
            stmt = conn.createStatement();

            // Step 1: drop the table if it already exists
            String tableName = dropTable(stmt);
            stmt.close();
            stmt = conn.createStatement();

            // Step 2: create the table
            createTable(stmt, tableName);
            stmt.close();
            stmt = conn.createStatement();

            // Step 3: verify the table was created
            showTables(stmt, tableName);
            stmt.close();
            stmt = conn.createStatement();

            // Run describe table
            describeTables(stmt, tableName);
            stmt.close();
            stmt = conn.createStatement();

            // Dump a table to a local file
            selectData2File(stmt, "pf_test.hdfs_fsimage_file_orc", "/d/test/fsimage.txt");
            stmt.close();
            stmt = conn.createStatement();

            // // Run load data into table
            // loadData(stmt, tableName);
            //
            // // Run a select * query
            // selectData(stmt, tableName);
            //
            // // Run a regular Hive count query
            // countData(stmt, tableName);

        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            log.error(driverName + " not found!", e);
            System.exit(1);
        } catch (SQLException e) {
            e.printStackTrace();
            log.error("Connection error!", e);
            System.exit(1);
        } finally {
            try {
                // Close the statement before the connection, not the other way around
                if (stmt != null) {
                    stmt.close();
                    stmt = null;
                }
                if (conn != null) {
                    conn.close();
                    conn = null;
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }

    private static void countData(Statement stmt, String tableName)
            throws SQLException {
        sql = "select count(1) from " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of the regular Hive query:");
        while (res.next()) {
            System.out.println("count ------> " + res.getString(1));
        }
    }

    private static void selectData(Statement stmt, String tableName)
            throws SQLException {
        sql = "select * from " + tableName + " limit 100";
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of the select * query:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    private static void selectData2File(Statement stmt, String tableName, String fileName) {
        sql = "select * from " + tableName + " limit 10000000";
        System.out.println("Running: " + sql);

        // try-with-resources guarantees the writer is closed even if a row read fails
        try (BufferedWriter bw = new BufferedWriter(new FileWriter(fileName))) {
            res = stmt.executeQuery(sql);
            System.out.println("Writing select * results to " + fileName);
            ResultSetMetaData rsmd = res.getMetaData();
            int columnsNumber = rsmd.getColumnCount();

            while (res.next()) {
                for (int i = 1; i <= columnsNumber; i++) {
                    bw.write(res.getString(i) + "\t");
                }
                bw.write("\n");
            }
        } catch (SQLException | IOException e) {
            e.printStackTrace();
        }
    }

    private static void loadData(Statement stmt, String tableName)
            throws SQLException {
        // Local path on the HiveServer2 machine; here it is under the home
        // directory of the VM where Hive is installed
        String filepath = "user.txt";
        sql = "load data local inpath '" + filepath + "' into table " + tableName;
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    }

    private static void describeTables(Statement stmt, String tableName)
            throws SQLException {
        sql = "describe " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of describe table:");
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    private static void showTables(Statement stmt, String tableName)
            throws SQLException {
        sql = "show tables '" + tableName + "'";
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        System.out.println("Result of show tables:");
        if (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    private static void createTable(Statement stmt, String tableName)
            throws SQLException {
        sql = "create table " + tableName
                + " (key int, value string) row format delimited fields terminated by '\t'";
        stmt.execute(sql);
    }

    private static String dropTable(Statement stmt) throws SQLException {
        // Name of the table to create
        String tableName = "testHive";
        sql = "drop table if exists " + tableName;
        stmt.execute(sql);
        return tableName;
    }

    private static Connection getConn() throws ClassNotFoundException,
            SQLException {
        Class.forName(driverName);
        return DriverManager.getConnection(url, user, password);
    }
}
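
The per-step close-and-recreate of the Statement above follows the original pattern; with Java 7+ the same lifecycle can be written with try-with-resources, which closes the ResultSet, Statement, and Connection automatically, in reverse order, even when a query throws. A minimal sketch (the host, port, database, and credentials are placeholders, not values from the example above):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcTryWithResources {
    public static void main(String[] args) throws Exception {
        // Placeholder URL; substitute your HiveServer2 host/port or ZooKeeper quorum
        String url = "jdbc:hive2://hs2-host:10000/default";

        // JDBC 4 drivers are usually discovered from the classpath automatically,
        // but an explicit Class.forName still works if discovery fails
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // All three resources are closed automatically when the block exits
        try (Connection conn = DriverManager.getConnection(url, "user", "password");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}

Maven dependencies for the example: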
<dependencies>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>1.2.2</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>2.2.0</version>
    </dependency>
    <dependency>
        <groupId>mysql</groupId>
        <artifactId>mysql-connector-java</artifactId>
        <version>5.1.45</version>
    </dependency>
</dependencies>
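
hive-jdbc pulls in a large tree of transitive dependencies that can clash with an application's own libraries. One common workaround, assuming the artifact is published for your Hive version, is the standalone classifier, which bundles the driver and its dependencies into a single jar:

<!-- Assumption: the standalone classifier is published for your Hive version -->
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>1.2.2</version>
    <classifier>standalone</classifier>
</dependency>

Note that mysql-connector-java is only needed if the same project also talks to the metastore database directly; a pure HiveServer2 JDBC client can usually drop it.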