  • Hive notes: configuring a remote connection (Java/beeline)

    1. Modify hive-site.xml and the proxy-user restrictions in Hadoop's core-site.xml (hadoop.proxyuser.<user>.hosts / hadoop.proxyuser.<user>.groups) --- see "Hive记录-部署Hive环境" (Hive notes: deploying the Hive environment)

    2. Start Hadoop

     #sh /usr/app/hadoop/sbin/start-all.sh
    3. Run jps and check that all five daemons are present: NameNode, DataNode, NodeManager, ResourceManager, SecondaryNameNode
    4. Start the hiveserver2 service
    #hive --service hiveserver2
    #netstat -ant | grep 10000   # check that port 10000 is listening (a client-side reachability check is sketched below)
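
    Before moving on to beeline or JDBC it helps to confirm from the client machine that the HiveServer2 port is actually reachable (firewalls commonly block it). The snippet below is a minimal sketch, not part of the original post; it reuses the host 192.168.66.66 and the default Thrift port 10000 from this walkthrough and is simply the client-side counterpart of the netstat check above (HiveServer2PortCheck is a hypothetical class name).

    package hive;
    
    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.net.Socket;
    
    public class HiveServer2PortCheck {
    	public static void main(String[] args) {
    		String host = "192.168.66.66"; // HiveServer2 host used in this post
    		int port = 10000;              // default hive.server2.thrift.port
    		try (Socket socket = new Socket()) {
    			// Attempt a plain TCP connection with a 3-second timeout
    			socket.connect(new InetSocketAddress(host, port), 3000);
    			System.out.println("HiveServer2 port " + port + " is reachable");
    		} catch (IOException e) {
    			System.out.println("Cannot reach " + host + ":" + port + ": " + e.getMessage());
    		}
    	}
    }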

    5. Connect with beeline

    #beeline
    #beeline>!connect jdbc:hive2://192.168.66.66:10000
    # Enter a username and password       ### the Linux login user (e.g. root) also works, as long as it has permission on the Hadoop and Hive directories
    # Once the connection succeeds you can work with the Hive data


    6. Remote connection from Java

    6.1 Required JARs (the hive-xxx.jar versions must match the Hive version on the server)



    6.2 Create log4j.properties under src

    -------------------------------------------------------
    inceptor.root.logger=INFO,RFA
    inceptor.log.dir=/usr/app/hive/logs
    inceptor.log.file=spark.log
    
    # Define the root logger from the system property "inceptor.root.logger".
    log4j.rootLogger=${inceptor.root.logger}
    
    # Set everything to be logged to the console
    log4j.appender.console=org.apache.log4j.ConsoleAppender
    log4j.appender.console.target=System.err
    log4j.appender.console.layout=org.apache.log4j.PatternLayout
    log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c: %m%n
    
    # output to file
    log4j.appender.RFA=org.apache.log4j.RollingFileAppender
    log4j.appender.RFA.File=${inceptor.log.dir}/${inceptor.log.file}
    # Maximum size of the log file before it rolls over
    log4j.appender.RFA.MaxFileSize=10MB
    # Maximum number of backup files to keep.
    log4j.appender.RFA.MaxBackupIndex=1024
    # Pattern to output: date priority [category] - message
    log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
    log4j.appender.RFA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c: %m%n
    
    # Ignore messages below warning level from Jetty, because it's a bit verbose
    log4j.logger.org.eclipse.jetty=WARN
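
    Because log4j.properties sits under src, it ends up on the classpath and log4j 1.x picks it up automatically. The snippet below is a small sketch, not from the original post, showing how client code can also load it explicitly and write a message through the configuration above (LoggingCheck is a hypothetical class name).

    package hive;
    
    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;
    
    public class LoggingCheck {
    	private static final Logger LOG = Logger.getLogger(LoggingCheck.class);
    
    	public static void main(String[] args) {
    		// Explicit load; optional when log4j.properties is already on the classpath
    		PropertyConfigurator.configure("src/log4j.properties");
    		LOG.info("log4j initialised; Hive JDBC log output follows this configuration");
    	}
    }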
    6.3  ToHive.java

    package hive;
    
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;
    
    public class ToHive {
    	private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    	public boolean run() {
    
    		try {
    			Class.forName(driverName);
    			Connection con = null;
    			// The port defaults to 10000; change it to match your setup
    			// Username: root, password: 1 (the Linux login credentials)
    			con = DriverManager.getConnection(
    					"jdbc:hive2://192.168.66.66:10000/siat", "root", "1");
    			Statement stmt = con.createStatement();
    			ResultSet res = null;
    			String sql = "select count(*) from test";
    			System.out.println("Running: " + sql);
    			res = stmt.executeQuery(sql);
    			System.out.println("ok");
    			while (res.next()) {
    				System.out.println(res.getString(1));
    			}
    			return true;
    		} catch (Exception e) {
    			e.printStackTrace();
    			System.out.println("error");
    			return false;
    		}
    	}
    	public static void main(String[] args) throws SQLException {
    		ToHive hiveJdbcClient = new ToHive();
    		hiveJdbcClient.run();
    	}
    
    }
    6.4 Right-click and run: the query is submitted to Hadoop for execution and the result is returned
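
    ToHive leaves its Connection, Statement and ResultSet open. As a variant (a sketch, not part of the original post; ToHiveAutoClose is a hypothetical class name), the same query can be run with try-with-resources so the JDBC objects are closed automatically; the URL, database (siat), user and password are the example values used above.

    package hive;
    
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.ResultSet;
    import java.sql.Statement;
    
    public class ToHiveAutoClose {
    	private static final String URL = "jdbc:hive2://192.168.66.66:10000/siat";
    
    	public static void main(String[] args) throws Exception {
    		Class.forName("org.apache.hive.jdbc.HiveDriver"); // same driver class as ToHive
    		// try-with-resources closes the connection, statement and result set on exit
    		try (Connection con = DriverManager.getConnection(URL, "root", "1");
    				Statement stmt = con.createStatement();
    				ResultSet res = stmt.executeQuery("select count(*) from test")) {
    			while (res.next()) {
    				System.out.println(res.getString(1)); // prints the row count
    			}
    		}
    	}
    }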





  • Original post: https://www.cnblogs.com/xinfang520/p/7684613.html