  • Hands-on example 1: Hive with JDBC

    When developing a Hive program over JDBC, you must first start Hive's remote service interface (HiveServer, which listens on port 10000 by default). Start it with the following command:

    hive --service hiveserver &
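
    If you want to confirm that the service is actually up before running the client, a quick reachability check can help. The sketch below simply opens a TCP connection to the host and port used in the example code (192.168.11.157:10000, HiveServer's default port); adjust both to your own environment.

    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.net.Socket;

    public class HiveServerCheck {
            public static void main(String[] args) {
                    // Host and port taken from the JDBC URL used in the example below;
                    // change them to match your own HiveServer instance.
                    String host = "192.168.11.157";
                    int port = 10000;
                    try (Socket socket = new Socket()) {
                            socket.connect(new InetSocketAddress(host, port), 3000); // 3-second timeout
                            System.out.println("HiveServer is reachable at " + host + ":" + port);
                    } catch (IOException e) {
                            System.out.println("HiveServer is not reachable: " + e.getMessage());
                    }
            }
    }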

    1). Test data
    Contents of the userinfo.txt file (fields on each line are separated by a tab):

    1    xiapi
    2    xiaoxue
    3    qingqing

    2). Program code

    package com.ljq.hive;
    
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;
    
    import org.apache.log4j.Logger;
    
    public class HiveJdbcClient {
            private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
            private static String url = "jdbc:hive://192.168.11.157:10000/default";
            private static String user = "hive";
            private static String password = "mysql";
            private static String sql = "";
            private static ResultSet res;
            private static final Logger log = Logger.getLogger(HiveJdbcClient.class);
    
            public static void main(String[] args) {
                    try {
                            Class.forName(driverName);
                            Connection conn = DriverManager.getConnection(url, user, password);
                            Statement stmt = conn.createStatement();
    
                            // name of the table to create
                            String tableName = "testHiveDriverTable";
                            /** Step 1: drop the table if it already exists **/
                            sql = "drop table " + tableName;
                            stmt.executeQuery(sql);
    
                            /** Step 2: create the table **/
                            sql = "create table " + tableName + " (key int, value string)  row format delimited fields terminated by '\t'";
                            stmt.executeQuery(sql);
    
                            // run "show tables"
                            sql = "show tables '" + tableName + "'";
                            System.out.println("Running:" + sql);
                            res = stmt.executeQuery(sql);
                            System.out.println("Result of 'show tables':");
                            if (res.next()) {
                                    System.out.println(res.getString(1));
                            }
    
                            // run "describe table"
                            sql = "describe " + tableName;
                            System.out.println("Running:" + sql);
                            res = stmt.executeQuery(sql);
                            System.out.println("Result of 'describe table':");
                            while (res.next()) {  
                                    System.out.println(res.getString(1) + "\t" + res.getString(2));
                            }
    
                            // run "load data into table"
                            String filepath = "/home/hadoop/ziliao/userinfo.txt";
                            sql = "load data local inpath '" + filepath + "' into table " + tableName;
                            System.out.println("Running:" + sql);
                            res = stmt.executeQuery(sql);
                            
                            // run the "select *" query
                            sql = "select * from " + tableName;
                            System.out.println("Running:" + sql);
                            res = stmt.executeQuery(sql);
                            System.out.println("Result of 'select * query':");
                            while (res.next()) {
                                    System.out.println(res.getInt(1) + "\t" + res.getString(2));
                            }
    
                            // run a regular Hive query (count)
                            sql = "select count(1) from " + tableName;
                            System.out.println("Running:" + sql);
                            res = stmt.executeQuery(sql);
                            System.out.println("Result of 'regular hive query':");
                            while (res.next()) {
                                    System.out.println(res.getString(1));
    
                            }
    
                            conn.close();
                            conn = null;
                    } catch (ClassNotFoundException e) {
                            e.printStackTrace();
                            log.error(driverName + " not found!", e);
                            System.exit(1);
                    } catch (SQLException e) {
                            e.printStackTrace();
                            log.error("Connection error!", e);
                            System.exit(1);
                    }
    
            }
    }

    3). Run results (right-click --> Run As --> Run on Hadoop)

    Running:show tables 'testHiveDriverTable'
    Result of 'show tables':
    testhivedrivertable
    Running:describe testHiveDriverTable
    Result of 'describe table':
    key    int
    value    string
    Running:load data local inpath '/home/hadoop/ziliao/userinfo.txt' into table testHiveDriverTable
    Running:select * from testHiveDriverTable
    Result of 'select * query':
    1    xiapi
    2    xiaoxue
    3    qingqing
    Running:select count(1) from testHiveDriverTable
    Result of 'regular hive query':
    3
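
    As a side note, the same JDBC flow from step 2) can also be written with try-with-resources, so the Connection, Statement and ResultSet are closed automatically even when a query fails. The sketch below reuses the driver, URL and credentials from the listing above and only runs the final count query; it assumes testHiveDriverTable already exists and has been loaded.

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;

    public class HiveJdbcCount {
            public static void main(String[] args) throws Exception {
                    // Same HiveServer1 driver and connection settings as in HiveJdbcClient above.
                    Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
                    try (Connection conn = DriverManager.getConnection(
                                    "jdbc:hive://192.168.11.157:10000/default", "hive", "mysql");
                         Statement stmt = conn.createStatement();
                         ResultSet rs = stmt.executeQuery("select count(1) from testHiveDriverTable")) {
                            // All three resources are closed automatically when this block exits.
                            while (rs.next()) {
                                    System.out.println("row count: " + rs.getString(1));
                            }
                    }
            }
    }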
  • Original article: https://www.cnblogs.com/linjiqin/p/2947848.html