zoukankan      html  css  js  c++  java
  • Hive2 jdbc test

    package andes;

       

    import java.io.BufferedWriter;

    import java.io.FileOutputStream;

    import java.io.IOException;

    import java.io.OutputStreamWriter;

    import java.sql.Connection;

    import java.sql.DriverManager;

    import java.sql.ResultSet;

    import java.sql.ResultSetMetaData;

    import java.sql.SQLException;

    import java.sql.Statement;

       

    import org.apache.hadoop.conf.Configuration;

    import org.apache.hadoop.security.UserGroupInformation;

       

    public class Hive2JdbcTest {

       

    // org.apache.hadoop.hive.jdbc.HiveDriver

    // org.apache.hive.jdbc.HiveDriver

    private static String driverName = "org.apache.hive.jdbc.HiveDriver";

       

    public static void main(String[] args) throws SQLException {

    try {

    Class.forName(driverName);

    } catch (ClassNotFoundException e) {

    e.printStackTrace();

    System.exit(1);

    }

       

    Configuration conf = new Configuration();

    conf.setBoolean("hadoop.security.authorization", true);

    conf.set("hadoop.security.authentication", "kerberos");

       

    try {

    UserGroupInformation.loginUserFromKeytab("user@domain",

    "/home/user/user.keytab");

    } catch (IOException e) {

    e.printStackTrace();

    }

       

    Connection con = DriverManager

    .getConnection(

    "jdbc:hive2://host:port/db;principal=hive/host

    @domain",

    "", "");

    Statement stmt = con.createStatement();

       

    // stmt.executeQuery("use db");

    String sql = "select * from table 2000";

    System.out.println("Running: " + sql);

    ResultSet res = stmt.executeQuery(sql);

    // dump res to file

    try {

    write2file("c:\work\jdbctest.txt", res);

    } catch (IOException e) {

    e.printStackTrace();

    }

    res.close();

    }

       

    static void write2file(String filepath, ResultSet res) throws SQLException,

    IOException {

    // filepath="c:\work\jdbctest.txt";

    FileOutputStream fout;

    fout = new FileOutputStream(filepath);

    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(fout,

    "UTF-8"));

    ResultSetMetaData meta = res.getMetaData();

    int numberOfColumns = meta.getColumnCount();

    while (res.next()) {

    for (int i = 1; i < numberOfColumns; i++) {

    bw.write(String.valueOf(res.getObject(i)));

    bw.write(' ');

    }

    bw.write(String.valueOf(res.getObject(numberOfColumns)));

    bw.newLine();

    }

    bw.close();

    System.out.println("finished");

    }

    }

    java command to run the jar file (main class is andes.Hive2JdbcTest):

    java -classpath ".:/home/test/tmp/andes/*:" andes.Hive2JdbcTest /home/test/tmp/andes/test.keytab

  • 相关阅读:
    [BZOJ3160]万径人踪灭
    [BZOJ5212][ZJOI2018]历史
    [BZOJ3563&3569]DZY Loves Chinese
    [HDU4336]Card Collector
    [HDU4652]Dice
    [POJ3683]Priest John's Busiest Day
    ISODateTimeFormat 转换2019-08-15T00:36:49.366456463Z 日期格式
    GoTTY-K8S-Docker 终端
    【php】PHP对redis操作详解
    【tool】VLC播放rtmp协议
  • 原文地址:https://www.cnblogs.com/huaxiaoyao/p/4499471.html
Copyright © 2011-2022 走看看