zoukankan      html  css  js  c++  java
  • Java操作Hive

    Hadoop版本:hadoop-2.9.2.tar.gz,Hive版本:apache-hive-2.3.6-src.tar.gz,安装Hive可查看:CentOS安装Hive

    保证Hive已正确启动hiveserver2

    pom.xml依赖:

    <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
        xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
        <modelVersion>4.0.0</modelVersion>
    
        <groupId>com.zhi.test</groupId>
        <artifactId>hive-test</artifactId>
        <version>0.0.1-SNAPSHOT</version>
        <packaging>jar</packaging>
    
        <name>hive-test</name>
        <url>http://maven.apache.org</url>
    
        <properties>
            <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
            <!-- Log4j2 version (property renamed from the misspelled "lg4j2.version") -->
            <log4j2.version>2.12.1</log4j2.version>
        </properties>
    
        <dependencies>
            <!-- Hive JDBC driver; its bundled log4j 1.x / slf4j-log4j12 bindings are
                 excluded so logging goes through the Log4j2 bridges declared below. -->
            <dependency>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-jdbc</artifactId>
                <version>2.3.6</version>
                <exclusions>
                    <exclusion>
                        <groupId>log4j</groupId>
                        <artifactId>log4j</artifactId>
                    </exclusion>
                    <exclusion>
                        <groupId>org.slf4j</groupId>
                        <artifactId>slf4j-log4j12</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>
    
            <dependency>
                <groupId>mysql</groupId>
                <artifactId>mysql-connector-java</artifactId>
                <version>8.0.17</version>
            </dependency>
    
            <!-- Logger (Log4j2) -->
            <dependency>
                <groupId>org.apache.logging.log4j</groupId>
                <artifactId>log4j-core</artifactId>
                <version>${log4j2.version}</version>
            </dependency>
            <!-- Log4j 1.x API bridge (routes legacy log4j calls to Log4j2) -->
            <dependency>
                <groupId>org.apache.logging.log4j</groupId>
                <artifactId>log4j-1.2-api</artifactId>
                <version>${log4j2.version}</version>
            </dependency>
            <!-- SLF4J bridge (routes SLF4J calls to Log4j2) -->
            <dependency>
                <groupId>org.apache.logging.log4j</groupId>
                <artifactId>log4j-slf4j-impl</artifactId>
                <version>${log4j2.version}</version>
            </dependency>
    
            <!-- tools.jar for JDK 8; path separators were mangled in the article
                 text and are restored here. Consider ${java.home}/../lib/tools.jar
                 for a machine-independent path. -->
            <dependency>
                <groupId>jdk.tools</groupId>
                <artifactId>jdk.tools</artifactId>
                <version>1.8</version>
                <scope>system</scope>
                <systemPath>D:/Program Files/Java/jdk1.8.0_191/lib/tools.jar</systemPath>
            </dependency>
    
            <!-- Test-only dependency: scoped to test so it is kept off the compile
                 and runtime classpath of the published artifact. -->
            <dependency>
                <groupId>org.junit.jupiter</groupId>
                <artifactId>junit-jupiter-engine</artifactId>
                <version>5.5.2</version>
                <scope>test</scope>
            </dependency>
        </dependencies>
    </project>

    Java代码:

    package com.zhi.test.hive;
    
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;
    
    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;
    import org.junit.jupiter.api.AfterAll;
    import org.junit.jupiter.api.BeforeAll;
    import org.junit.jupiter.api.MethodOrderer;
    import org.junit.jupiter.api.Order;
    import org.junit.jupiter.api.Test;
    import org.junit.jupiter.api.TestInstance;
    import org.junit.jupiter.api.TestMethodOrder;
    import org.junit.jupiter.api.TestInstance.Lifecycle;
    
    /**
     * 使用JDBC进行Hive操作
     * 
     * @author zhi
     * @since 2019年9月11日11:55:00
     *
     */
    /**
     * Hive DDL/DML operations through the HiveServer2 JDBC driver.
     * <p>
     * Requires a running HiveServer2 instance at the URL in {@link #init()}. The
     * tests are ordered with {@link Order} so the database and table exist before
     * they are queried and are dropped at the end. (The original had a duplicate
     * {@code @Order(3)} on two methods, leaving their relative order unspecified;
     * the orders are now strictly sequential.)
     * <p>
     * NOTE(review): every test catches {@link SQLException} and only logs it, so a
     * failing statement does not fail the test run — presumably intentional for a
     * demo; rethrow or assert if real verification is wanted.
     *
     * @author zhi
     * @since 2019-09-11
     */
    @TestInstance(Lifecycle.PER_CLASS)
    @TestMethodOrder(MethodOrderer.OrderAnnotation.class)
    public class HiveTest {
        // Conventional Log4j2 idiom: one static logger per class.
        private static final Logger logger = LogManager.getLogger(HiveTest.class);
        // Shared JDBC connection, opened once for the whole test class.
        private static Connection connection = null;
    
        /**
         * Opens the HiveServer2 connection once before all tests.
         * <p>
         * NOTE(review): the URL already names {@code test_db} even though
         * {@link #createDatabase()} only creates it later — confirm that the
         * target HiveServer2 tolerates connecting to a not-yet-existing database.
         *
         * @throws Exception if the driver cannot be loaded or the connection fails
         */
        @BeforeAll
        public void init() throws Exception {
            try {
                // Explicit driver load; redundant with JDBC 4 auto-discovery but harmless.
                Class.forName("org.apache.hive.jdbc.HiveDriver");
                connection = DriverManager.getConnection("jdbc:hive2://192.168.107.143:10000/test_db", "root", "abc123");
            } catch (SQLException | ClassNotFoundException e) {
                logger.error("创建Hive连接失败", e);
                throw e;
            }
        }
    
        /**
         * Closes the shared connection after all tests. (Renamed from "destory".)
         *
         * @throws Exception if closing the connection fails
         */
        @AfterAll
        public void destroy() throws Exception {
            if (connection != null) {
                connection.close();
            }
        }
    
        /**
         * Creates the test database.
         */
        @Order(1)
        @Test
        public void createDatabase() {
            String sql = "create database test_db";
            logger.info("创建数据库,脚本:{}", sql);
            try (Statement statement = connection.createStatement()) {
                statement.execute(sql);
                logger.info("创建数据库成功");
            } catch (SQLException e) {
                logger.error("创建数据库出错", e);
            }
        }
    
        /**
         * Lists all databases.
         */
        @Order(2)
        @Test
        public void showDatabases() {
            String sql = "show databases";
            logger.info("查询数据库,脚本:{}", sql);
            try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
                while (rs.next()) {
                    logger.info("查询到数据库:{}", rs.getString(1));
                }
            } catch (SQLException e) {
                // Fixed copy-paste bug: message previously said "创建数据库出错".
                logger.error("查询数据库出错", e);
            }
        }
    
        /**
         * Creates the user table with comma-delimited text rows.
         */
        @Order(3)
        @Test
        public void createTable() {
            String sql = "create table user_tb(id int, name string) row format delimited fields terminated by ','";
            logger.info("创建表,脚本:{}", sql);
            try (Statement statement = connection.createStatement()) {
                statement.execute(sql);
                logger.info("创建表成功");
            } catch (SQLException e) {
                logger.error("创建表出错", e);
            }
        }
    
        /**
         * Lists all tables. (Was a duplicate @Order(3); renumbered to 4.)
         */
        @Order(4)
        @Test
        public void showTables() {
            String sql = "show tables";
            logger.info("查询所有表,脚本:{}", sql);
            try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
                while (rs.next()) {
                    logger.info("查询到表:{}", rs.getString(1));
                }
            } catch (SQLException e) {
                logger.error("查询所有表出错", e);
            }
        }
    
        /**
         * Describes the table structure (column name and type).
         */
        @Order(5)
        @Test
        public void descTable() {
            String sql = "desc user_tb";
            logger.info("查看表结构,脚本:{}", sql);
            try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
                while (rs.next()) {
                    logger.info("字段名:{},类型:{}", rs.getString(1), rs.getString(2));
                }
            } catch (SQLException e) {
                logger.error("查看表结构出错", e);
            }
        }
    
        /**
         * Loads local data into the table. The file /home/data.txt (on the
         * HiveServer2 host) contains lines such as:<br>
         * 1,张三<br>
         * 2,李四
         */
        @Order(6)
        @Test
        public void loadData() {
            String sql = "load data local inpath '/home/data.txt' overwrite into table user_tb";
            logger.info("导入数据,脚本:{}", sql);
            try (Statement statement = connection.createStatement()) {
                statement.execute(sql);
                logger.info("导入数据成功");
            } catch (SQLException e) {
                logger.error("导入数据出错", e);
            }
        }
    
        /**
         * Selects and logs all rows.
         */
        @Order(7)
        @Test
        public void selectData() {
            String sql = "select * from user_tb";
            logger.info("查询数据,脚本:{}", sql);
            try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
                while (rs.next()) {
                    logger.info("id={},name={}", rs.getInt("id"), rs.getString("name"));
                }
            } catch (SQLException e) {
                logger.error("查询数据出错", e);
            }
        }
    
        /**
         * Counts the rows in the table.
         */
        @Order(8)
        @Test
        public void count() {
            String sql = "select count(1) from user_tb";
            logger.info("查数量,脚本:{}", sql);
            try (Statement statement = connection.createStatement(); ResultSet rs = statement.executeQuery(sql)) {
                while (rs.next()) {
                    logger.info("数量={}", rs.getInt(1));
                }
            } catch (SQLException e) {
                logger.error("查数量出错", e);
            }
        }
    
        /**
         * Drops the table. (Renamed from "deopTable".)
         */
        @Order(9)
        @Test
        public void dropTable() {
            String sql = "drop table if exists user_tb";
            logger.info("删除表,脚本:{}", sql);
            try (Statement statement = connection.createStatement()) {
                statement.execute(sql);
                logger.info("删除表成功");
            } catch (SQLException e) {
                logger.error("删除表出错", e);
            }
        }
    
        /**
         * Drops the database.
         */
        @Order(10)
        @Test
        public void dropDatabase() {
            String sql = "drop database if exists test_db";
            logger.info("删除数据库,脚本:{}", sql);
            try (Statement statement = connection.createStatement()) {
                statement.execute(sql);
                logger.info("删除数据库成功");
            } catch (SQLException e) {
                logger.error("删除数据库出错", e);
            }
        }
    }

    代码执行结果:

    2019-09-11 22:05:52.324 [main] INFO  org.apache.hive.jdbc.Utils.parseURL 325 - Supplied authorities: 192.168.107.143:10000
    2019-09-11 22:05:52.334 [main] INFO  org.apache.hive.jdbc.Utils.parseURL 444 - Resolved authority: 192.168.107.143:10000
    2019-09-11 22:05:52.772 [main] INFO  com.zhi.test.hive.HiveTest.createDatabase 58 - 创建数据库,脚本:create database test_db
    2019-09-11 22:05:53.098 [main] INFO  com.zhi.test.hive.HiveTest.createDatabase 61 - 创建数据库成功
    2019-09-11 22:05:53.131 [main] INFO  com.zhi.test.hive.HiveTest.showDatabases 74 - 查询数据库,脚本:show databases
    2019-09-11 22:05:53.332 [main] INFO  com.zhi.test.hive.HiveTest.showDatabases 77 - 查询到数据库:default
    2019-09-11 22:05:53.332 [main] INFO  com.zhi.test.hive.HiveTest.showDatabases 77 - 查询到数据库:test_db
    2019-09-11 22:05:53.347 [main] INFO  com.zhi.test.hive.HiveTest.createTable 91 - 创建表,脚本:create table user_tb(id int, name string) row format delimited fields terminated by ','
    2019-09-11 22:05:53.623 [main] INFO  com.zhi.test.hive.HiveTest.createTable 94 - 创建表成功
    2019-09-11 22:05:53.644 [main] INFO  com.zhi.test.hive.HiveTest.showTables 107 - 查询所有表,脚本:show tables
    2019-09-11 22:05:53.784 [main] INFO  com.zhi.test.hive.HiveTest.showTables 110 - 查询到表:user_tb
    2019-09-11 22:05:53.784 [main] INFO  com.zhi.test.hive.HiveTest.showTables 110 - 查询到表:usertest_tb
    2019-09-11 22:05:53.806 [main] INFO  com.zhi.test.hive.HiveTest.descTable 124 - 查看表结构,脚本:desc user_tb
    2019-09-11 22:05:53.971 [main] INFO  com.zhi.test.hive.HiveTest.descTable 127 - 字段名:id,类型:int
    2019-09-11 22:05:53.971 [main] INFO  com.zhi.test.hive.HiveTest.descTable 127 - 字段名:name,类型:string
    2019-09-11 22:05:53.987 [main] INFO  com.zhi.test.hive.HiveTest.loadData 143 - 导入数据,脚本:load data local inpath '/home/data.txt' overwrite into table user_tb
    2019-09-11 22:05:55.106 [main] INFO  com.zhi.test.hive.HiveTest.loadData 146 - 导入数据成功
    2019-09-11 22:05:55.119 [main] INFO  com.zhi.test.hive.HiveTest.selectData 159 - 查询数据,脚本:select * from user_tb
    2019-09-11 22:05:55.870 [main] INFO  com.zhi.test.hive.HiveTest.selectData 162 - id=1,name=张三
    2019-09-11 22:05:55.871 [main] INFO  com.zhi.test.hive.HiveTest.selectData 162 - id=2,name=李四
    2019-09-11 22:05:55.890 [main] INFO  com.zhi.test.hive.HiveTest.count 176 - 查数量,脚本:select count(1) from user_tb
    2019-09-11 22:05:57.952 [main] INFO  com.zhi.test.hive.HiveTest.count 179 - 数量=2
    2019-09-11 22:05:57.983 [main] INFO  com.zhi.test.hive.HiveTest.deopTable 193 - 删除表,脚本:drop table if exists user_tb
    2019-09-11 22:05:58.256 [main] INFO  com.zhi.test.hive.HiveTest.deopTable 196 - 删除表成功
    2019-09-11 22:05:58.268 [main] INFO  com.zhi.test.hive.HiveTest.dropDatabase 209 - 删除数据库,脚本:drop database if exists test_db
    2019-09-11 22:05:58.494 [main] INFO  com.zhi.test.hive.HiveTest.dropDatabase 212 - 删除数据库成功
  • 相关阅读:
    python 回溯法 记录
    java提高篇(二三)-----HashMap
    java提高篇(二二)---LinkedList
    java提高篇(二一)-----ArrayList
    java提高篇(二十)-----集合大家族
    java提高篇(十九)-----数组之二
    java提高篇(十八)-----数组之一:认识JAVA数组
    java提高篇(十七)-----异常(二)
    java提高篇(十六)-----异常(一)
    jQuery读取和设定KindEditor的值
  • 原文地址:https://www.cnblogs.com/zhi-leaf/p/11509186.html
Copyright © 2011-2022 走看看