Hive Java API 002

Date: 2023-03-09 21:43:08
HiveServer2 listens on port 10000 by default.
Before starting it, edit hive-site.xml to configure impersonation and avoid HDFS permission problems: with impersonation the server executes statements as the user who submitted them, whereas with hive.server2.enable.doAs set to false it executes them as the admin user that started the HiveServer2 daemon.
<property>
  <name>hive.server2.enable.doAs</name>
  <value>false</value>
</property>
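
After restarting HiveServer2 with this setting, a quick way to confirm the service is reachable is a minimal JDBC probe. This is an illustrative sketch, not part of the original post; the host 192.168.231.137 and the hadoop/hadoop credentials are the same assumptions used in the full example below.

import java.sql.Connection;
import java.sql.DriverManager;

// Minimal HiveServer2 connectivity probe (sketch; host and credentials
// are assumptions carried over from the example below).
public class HiveProbe {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://192.168.231.137:10000/default", "hadoop", "hadoop")) {
            System.out.println("Connected: " + !conn.isClosed());
        }
    }
}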

pom.xml

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>xinwei</groupId>
  <artifactId>Hive</artifactId>
  <version>0.0.1-SNAPSHOT</version>
  <packaging>jar</packaging>
  <name>Hive</name>
  <url>http://maven.apache.org</url>
  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
  </properties>
  <dependencies>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.hive</groupId>
      <artifactId>hive-jdbc</artifactId>
      <version>0.14.0</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
      <version>2.4.0</version>
    </dependency>
    <dependency>
      <groupId>jdk.tools</groupId>
      <artifactId>jdk.tools</artifactId>
      <version>1.8</version>
      <scope>system</scope>
      <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
    </dependency>
  </dependencies>
</project>
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Hive 0.11.0 introduced a brand-new service, HiveServer2, which nicely fixes
// the security and concurrency problems of the original HiveServer. The
// startup script lives at ${HIVE_HOME}/bin/hiveserver2, and you can start the
// service like this:
// nohup ./hive --service hiveserver2 >/dev/null 2>&1 &
// Port 10000 is opened by default.
// Before starting it, edit hive-site.xml to configure impersonation and avoid
// HDFS permission problems: with impersonation the server executes statements
// as the submitting user; if hive.server2.enable.doAs is set to false, it
// executes them as the admin user that started the HiveServer2 daemon.
public class HiveTest {
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive2://192.168.231.137:10000/default";
    private static String username = "hadoop";
    private static String password = "hadoop";
    private static Connection conn = null;
    private static Statement stmt = null;
    private static String sql = "";
    private static ResultSet res = null;

    static {
        try {
            Class.forName(driverName);
            conn = DriverManager.getConnection(url, username, password);
            stmt = conn.createStatement();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws Exception {
        dropTable("hivetest");
        createTable("hivetest");
        showTables("hivetest");
        describeTables("hivetest");
        // The ids match the program output below; the age values were lost
        // from the source, so 20 is only a placeholder.
        insert("hivetest", new String[]{"10000", "tom", "20"});
        insert("hivetest", new String[]{"10001", "zhangshan", "20"});
        insert("hivetest", new String[]{"10002", "lisi", "20"});
        insert("hivetest", new String[]{"10003", "lucy", "20"});
        selectData("hivetest");
        // dropTable("hivetest");
    }

    // Query data
    public static void selectData(String tableName) throws SQLException {
        sql = "select * from " + tableName;
        res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getInt(1) + "\t" + res.getString(2));
        }
    }

    // Insert data
    public static void insert(String tableName, String[] datas) throws SQLException {
        sql = "insert into table " + tableName + " values ('" + datas[0] + "','"
                + datas[1] + "'," + Integer.valueOf(datas[2]) + ")";
        stmt.execute(sql);
    }

    // Describe the table structure
    public static void describeTables(String tableName) throws SQLException {
        sql = "describe " + tableName;
        res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
    }

    // Show tables
    public static void showTables(String tableName) throws SQLException {
        sql = "show tables '" + tableName + "'";
        res = stmt.executeQuery(sql);
        if (res.next()) {
            System.out.println(res.getString(1));
        }
    }

    // Create a table
    public static void createTable(String tableName) throws SQLException {
        sql = "create table " + tableName
                + " (id string, name string, age int) row format delimited fields terminated by '\t'";
        stmt.execute(sql);
    }

    // Drop a table
    public static String dropTable(String tableName) throws SQLException {
        sql = "drop table " + tableName;
        stmt.execute(sql);
        return tableName;
    }
}
// Program output:
//hivetest
//id string
//name string
//age int
//10000 tom
//10001 zhangshan
//10002 lisi
//10003 lucy
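
The example above builds SQL by string concatenation and never closes its connection, statement, or result set. A safer variant is sketched below, not part of the original post: it uses PreparedStatement placeholders and try-with-resources so everything is closed automatically. The host, credentials, table name, and the 10004/mike row are assumptions for illustration.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

// Sketch of a safer insert: parameter placeholders instead of string
// concatenation, and try-with-resources for cleanup. Host, credentials,
// and the inserted row are hypothetical values matching the example above.
public class HiveInsertSketch {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:hive2://192.168.231.137:10000/default", "hadoop", "hadoop");
             PreparedStatement ps = conn.prepareStatement(
                     "insert into table hivetest values (?, ?, ?)")) {
            ps.setString(1, "10004"); // id
            ps.setString(2, "mike");  // name (hypothetical row)
            ps.setInt(3, 20);         // age
            ps.execute();
        }
    }
}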