This article only covers accessing Hive data through the Java API (JDBC).
It assumes a working Hive environment, in particular a running HiveServer2.
The content is deliberately simple and is intended only as an example.
1. pom.xml
<dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>3.1.2</version>
</dependency>
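The test class below also uses JUnit 4 (@Before/@Test/@After) and Lombok's @Slf4j. If they are not already on your classpath, dependencies along these lines are needed; the version numbers here are assumptions, not taken from the original article:

<!-- JUnit 4 for the test annotations used below (version is an assumption) -->
<dependency>
    <groupId>junit</groupId>
    <artifactId>junit</artifactId>
    <version>4.13.2</version>
    <scope>test</scope>
</dependency>
<!-- Lombok for the @Slf4j logger (version is an assumption) -->
<dependency>
    <groupId>org.projectlombok</groupId>
    <artifactId>lombok</artifactId>
    <version>1.18.24</version>
    <scope>provided</scope>
</dependency>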
2. Java class
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import lombok.extern.slf4j.Slf4j;

/**
 * Operating Hive through JDBC
 */
@Slf4j
public class App {
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive2://server4:10000/default"; // "default" is the default database name
    private static String user = "alanchan"; // a user that can access HDFS in the Hadoop cluster
    private static String password = "123456"; // that user's password

    private static Connection conn = null;
    private static Statement stmt = null;
    private static ResultSet rs = null;

    @Before
    public void init() throws Exception {
        Class.forName(driverName);
        conn = DriverManager.getConnection(url, user, password);
        stmt = conn.createStatement();
    }

    // Create a database
    @Test
    public void createDatabase() throws Exception {
        String sql = "create database test";
        log.info("sql:{}", sql);
        stmt.execute(sql);
    }

    // List all databases
    @Test
    public void showDatabases() throws Exception {
        String sql = "show databases";
        log.info("sql:{}", sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    // Create a table
    @Test
    public void createTable() throws Exception {
        String sql = "create table test (id int, name string) row format delimited fields terminated by '\\t' ";
        log.info("sql:{}", sql);
        stmt.execute(sql);
    }

    // List all tables
    @Test
    public void showTables() throws Exception {
        String sql = "show tables";
        log.info("sql:{}", sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    // Describe a table's structure
    @Test
    public void descTable() throws Exception {
        String sql = "desc emp";
        log.info("sql:{}", sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1) + "\t" + rs.getString(2));
        }
    }

    // Load data into a table
    @Test
    public void loadData() throws Exception {
        String filePath = "/home/hadoop/data/emp.txt";
        String sql = "load data local inpath '" + filePath + "' overwrite into table test";
        log.info("sql:{}", sql);
        stmt.execute(sql);
    }

    // Query data
    @Test
    public void selectData() throws Exception {
        String sql = "select * from test";
        log.info("sql:{}", sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getInt("id") + "\t\t" + rs.getString("name"));
        }
    }

    // Aggregate query (this launches a MapReduce job)
    @Test
    public void countData() throws Exception {
        String sql = "select count(1) from test";
        log.info("sql:{}", sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getInt(1));
        }
    }

    // Drop the database
    @Test
    public void dropDatabase() throws Exception {
        String sql = "drop database if exists test";
        log.info("sql:{}", sql);
        stmt.execute(sql);
    }

    // Drop the table
    @Test
    public void dropTable() throws Exception {
        String sql = "drop table if exists test";
        log.info("sql:{}", sql);
        stmt.execute(sql);
    }

    // Release resources
    @After
    public void destroy() throws Exception {
        if (rs != null) {
            rs.close();
        }
        if (stmt != null) {
            stmt.close();
        }
        if (conn != null) {
            conn.close();
        }
    }
}
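Outside of a JUnit test, the same calls can be wired up directly. Below is a minimal standalone sketch using try-with-resources so the JDBC resources are closed automatically; the HiveServer2 address and credentials are simply reused from the test class above as assumptions and should be adjusted to your own environment.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Minimal standalone sketch (no JUnit/Lombok); connection details are assumptions
// copied from the test class above and must be changed to match your cluster.
public class HiveJdbcDemo {
    public static void main(String[] args) throws Exception {
        // Usually optional with JDBC 4+ drivers; kept for parity with the test class
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        String url = "jdbc:hive2://server4:10000/default";
        // try-with-resources closes ResultSet, Statement and Connection automatically
        try (Connection conn = DriverManager.getConnection(url, "alanchan", "123456");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show databases")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}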