
Connecting to Hive from Java via JDBC (Newer Versions): API Wrapper


I found many wrapped Hive JDBC APIs online, but they are all outdated and fail with all kinds of errors at runtime. After several days of debugging I finally got Hive working from Java code, so here is the working version.

First, the required libraries:

[Screenshot of the required JAR dependencies omitted. Judging from the imports below, you need at least the hive-jdbc driver matching your Hive version, its Hadoop dependencies, and log4j on the classpath.]

All the explanation is in the comments in the code below.

Note: many examples online execute DDL with res = stmt.executeQuery(sql).
That is wrong: in newer versions a DDL statement does not produce a result set, and you get the following error:
java.sql.SQLException: The query did not generate a result set!
So for DDL you must call stmt.execute(sql) instead, which returns a boolean.
Only statements that actually return rows (SELECT, SHOW TABLES, DESCRIBE, and so on) can go through executeQuery().
The code below makes this clear; if you are skeptical, try it yourself. I am on Hive 1.1.1.
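
To make the difference concrete, here is a minimal standalone sketch (assuming, as in the full example below, a HiveServer2 instance reachable at jdbc:hive2://master:10000/default with user/password hive/hive; adjust for your cluster). The DDL goes through execute(), and only the row-returning query goes through executeQuery():

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DdlVsQuery {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:hive2://master:10000/default", "hive", "hive");
             Statement stmt = conn.createStatement()) {
            // DDL produces no result set, so use execute(); it returns true
            // only if the statement produced a ResultSet (false here).
            boolean hasResultSet = stmt.execute(
                    "create table if not exists demo (key int, value string)");
            System.out.println("DDL produced a result set? " + hasResultSet);
            // A SELECT does return rows, so executeQuery() is the right call.
            try (ResultSet res = stmt.executeQuery("select count(1) from demo")) {
                while (res.next()) {
                    System.out.println("count ------>" + res.getString(1));
                }
            }
        }
    }
}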

package com.berg.hive.test1.api;

import java.sql.Connection;  
import java.sql.DriverManager;  
import java.sql.ResultSet;  
import java.sql.SQLException;  
import java.sql.Statement;  

import org.apache.log4j.Logger;  

/** 
 * Hive Java API over JDBC.
 * 
 * Start the remote service (HiveServer2, which the jdbc:hive2 URL below requires)
 * on the server first: hive --service hiveserver2 &
 * 
 * @author 汤高 
 * 
 */  
public class HiveJdbcCli {  
    // Older examples online use org.apache.hadoop.hive.jdbc.HiveDriver; that class
    // no longer works with newer Hive versions
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";  

    // Note the hive2 scheme: many examples online write jdbc:hive, which fails
    // against newer versions (HiveServer2)
    private static String url = "jdbc:hive2://master:10000/default"; 
    private static String user = "hive";  
    private static String password = "hive";  
    private static String sql = "";  
    private static ResultSet res;  
    private static final Logger log = Logger.getLogger(HiveJdbcCli.class);  

    public static void main(String[] args) {  
        Connection conn = null;  
        Statement stmt = null;  
        try {  
            conn = getConn();  
            stmt = conn.createStatement();  

            // Step 1: drop the table if it already exists  
            String tableName = dropTable(stmt);  

            // Step 2: create the table  
            createTable(stmt, tableName);  

            // Step 3: show the created table  
            showTables(stmt, tableName);  

            // describe table  
            describeTables(stmt, tableName);  

            // load data into table  
            loadData(stmt, tableName);  

            // select * query  
            selectData(stmt, tableName);  

            // regular hive query: count the rows  
            countData(stmt, tableName);  

        } catch (ClassNotFoundException e) {  
            e.printStackTrace();  
            log.error(driverName + " not found!", e);  
            System.exit(1);  
        } catch (SQLException e) {  
            e.printStackTrace();  
            log.error("Connection error!", e);  
            System.exit(1);  
        } finally {  
            try {  
                // close the Statement before the Connection it came from  
                if (stmt != null) {  
                    stmt.close();  
                    stmt = null;  
                }  
                if (conn != null) {  
                    conn.close();  
                    conn = null;  
                }  
            } catch (SQLException e) {  
                e.printStackTrace();  
            }  
        }  
    }  

    private static void countData(Statement stmt, String tableName)  
            throws SQLException {  
        sql = "select count(1) from " + tableName;  
        System.out.println("Running:" + sql);  
        res = stmt.executeQuery(sql);  
        System.out.println("执行“regular hive query”运行结果:");  
        while (res.next()) {  
            System.out.println("count ------>" + res.getString(1));  
        }  
    }  

    private static void selectData(Statement stmt, String tableName)  
            throws SQLException {  
        sql = "select * from " + tableName;  
        System.out.println("Running:" + sql);  
        res = stmt.executeQuery(sql);  
        System.out.println("执行 select * query 运行结果:");  
        while (res.next()) {  
            System.out.println(res.getInt(1) + "\t" + res.getString(2));  
        }  
    }  

    private static void loadData(Statement stmt, String tableName)  
            throws SQLException {  
        // Path to the data file; here it sits in the home directory of the
        // machine (a VM) where Hive is installed
        String filepath = "user.txt";  
        sql = "load data local inpath '" + filepath + "' into table "  
                + tableName;  
        System.out.println("Running:" + sql);  
        stmt.execute(sql);  
    }  

    private static void describeTables(Statement stmt, String tableName)  
            throws SQLException {  
        sql = "describe " + tableName;  
        System.out.println("Running:" + sql);  
        res = stmt.executeQuery(sql);  
        System.out.println("执行 describe table 运行结果:");  
        while (res.next()) {  
            System.out.println(res.getString(1) + "\t" + res.getString(2));  
        }  
    }  

    private static void showTables(Statement stmt, String tableName)  
            throws SQLException {  
        sql = "show tables '" + tableName + "'";  
        System.out.println("Running:" + sql);  
        res = stmt.executeQuery(sql);  
        System.out.println("执行 show tables 运行结果:");  
        if (res.next()) {  
            System.out.println(res.getString(1));  
        }  
    }  

    private static void createTable(Statement stmt, String tableName)  
            throws SQLException {  
        sql = "create table "  
                + tableName  
                + " (key int, value string)  row format delimited fields terminated by '\t'";  
        stmt.execute(sql);  
    }  

    private static String dropTable(Statement stmt) throws SQLException {  
        // Name of the table used throughout the example  
        String tableName = "testHive";  
        sql = "drop table if exists " + tableName;  
        stmt.execute(sql);  
        return tableName;  
    }  

    private static Connection getConn() throws ClassNotFoundException,  
            SQLException {  
        Class.forName(driverName);  
        Connection conn = DriverManager.getConnection(url, user, password);  
        return conn;  
    }  

}  
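
For reference, the user.txt consumed by loadData must match the table definition (key int, value string, fields terminated by '\t'): one record per line, an integer and a string separated by a single tab, located where the load path points (here, the home directory on the Hive machine). A made-up example:

1	tom
2	jack
3	berg

With this file loaded, the select step prints each key/value pair and the count step prints 3.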