zoukankan      html  css  js  c++  java
  • 实战1——Hive与JDBC示例

    在使用 JDBC 开发 Hive 程序时,  必须首先开启 Hive 的远程服务接口。使用下面命令进行开启:

    hive --service hiveserver &

    1). 测试数据
    userinfo.txt文件内容(每行数据之间用tab键隔开):

    1    xiapi
    2    xiaoxue
    3    qingqing

    2). 程序代码

    package com.ljq.hive;
    
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;
    
    import org.apache.log4j.Logger;
    
    /**
     * Minimal Hive JDBC example: (re)creates a table, loads a local data file,
     * and runs a few queries, printing results to stdout.
     *
     * <p>Requires a running HiveServer1 Thrift service ({@code hive --service hiveserver &}).
     * NOTE(review): this uses the legacy HiveServer1 driver/URL; HiveServer2 uses
     * {@code org.apache.hive.jdbc.HiveDriver} and {@code jdbc:hive2://...}.
     */
    public class HiveJdbcClient {
            private static String driverName = "org.apache.hadoop.hive.jdbc.HiveDriver";
            private static String url = "jdbc:hive://192.168.11.157:10000/default";
            private static String user = "hive";
            private static String password = "mysql";
            private static final Logger log = Logger.getLogger(HiveJdbcClient.class);

            public static void main(String[] args) {
                    try {
                            Class.forName(driverName);
                    } catch (ClassNotFoundException e) {
                            log.error(driverName + " not found!", e);
                            System.exit(1);
                    }

                    // try-with-resources guarantees Connection/Statement are closed
                    // on every path (the original leaked them on any SQLException).
                    try (Connection conn = DriverManager.getConnection(url, user, password);
                         Statement stmt = conn.createStatement()) {

                            // Name of the table to create.
                            String tableName = "testHiveDriverTable";

                            // Step 1: drop the table if it already exists.
                            // "if exists" keeps the first run from failing; DDL/DML must go
                            // through execute(), not executeQuery(), since it returns no ResultSet.
                            stmt.execute("drop table if exists " + tableName);

                            // Step 2: create the table (tab-delimited to match userinfo.txt).
                            stmt.execute("create table " + tableName
                                    + " (key int, value string)  row format delimited fields terminated by '\t'");

                            // Run "show tables".
                            String sql = "show tables '" + tableName + "'";
                            System.out.println("Running:" + sql);
                            try (ResultSet res = stmt.executeQuery(sql)) {
                                    System.out.println("执行“show tables”运行结果:");
                                    if (res.next()) {
                                            System.out.println(res.getString(1));
                                    }
                            }

                            // Run "describe table".
                            sql = "describe " + tableName;
                            System.out.println("Running:" + sql);
                            try (ResultSet res = stmt.executeQuery(sql)) {
                                    System.out.println("执行“describe table”运行结果:");
                                    while (res.next()) {
                                            System.out.println(res.getString(1) + "\t" + res.getString(2));
                                    }
                            }

                            // Load the local data file into the table (DML: use execute()).
                            String filepath = "/home/hadoop/ziliao/userinfo.txt";
                            sql = "load data local inpath '" + filepath + "' into table " + tableName;
                            System.out.println("Running:" + sql);
                            stmt.execute(sql);

                            // Run "select * query".
                            sql = "select * from " + tableName;
                            System.out.println("Running:" + sql);
                            try (ResultSet res = stmt.executeQuery(sql)) {
                                    System.out.println("执行“select * query”运行结果:");
                                    while (res.next()) {
                                            System.out.println(res.getInt(1) + "\t" + res.getString(2));
                                    }
                            }

                            // Run a regular aggregate query.
                            sql = "select count(1) from " + tableName;
                            System.out.println("Running:" + sql);
                            try (ResultSet res = stmt.executeQuery(sql)) {
                                    System.out.println("执行“regular hive query”运行结果:");
                                    while (res.next()) {
                                            System.out.println(res.getString(1));
                                    }
                            }
                    } catch (SQLException e) {
                            // Log with full stack trace; no duplicate printStackTrace().
                            log.error("Connection error!", e);
                            System.exit(1);
                    }
            }
    }

    3). 运行结果(右击-->Run as-->Run on Hadoop)

    Running:show tables 'testHiveDriverTable'
    执行“show tables”运行结果:
    testhivedrivertable
    Running:describe testHiveDriverTable
    执行“describe table”运行结果:
    key    int
    value    string
    Running:load data local inpath '/home/hadoop/ziliao/userinfo.txt' into table testHiveDriverTable
    Running:select * from testHiveDriverTable
    执行“select * query”运行结果:
    1    xiapi
    2    xiaoxue
    3    qingqing
    Running:select count(1) from testHiveDriverTable
    执行“regular hive query”运行结果:
    3
  • 相关阅读:
    『cs231n』作业2选讲_通过代码理解优化器
    谷歌(Google)学术镜像,谷歌镜像
    官网实例详解-目录和实例简介-keras学习笔记四
    深度挖坑:从数据角度看人脸识别中Feature Normalization,Weight Normalization以及Triplet的作用
    NIPS 2018 | 程序翻译新突破:UC伯克利提出树到树的程序翻译神经网络
    烧脑!CMU、北大等合著论文真的找到了神经网络的全局最优解
    win7+cuda+anaconda python+tensorflow-gpu+keras安装成功版本匹配汇总
    Delphi 在DLL中使用DevExpress控件时出错解决办法
    让文件添加鼠标右键菜单
    phpStudy模式下安装ssl证书,详细版
  • 原文地址:https://www.cnblogs.com/linjiqin/p/2947848.html
Copyright © 2011-2022 走看看