  • Spark 2.0: Saving Hive Query Results to a Database with Java

    package com.gm.hive.SparkHive;
    
    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SaveMode;
    import org.apache.spark.sql.SparkSession;
    import java.util.Properties;
    /**
     * Use Spark SQL to read data from Hive and write the aggregated
     * result to a PostgreSQL table over JDBC.
     */
    public class App {
    	public static void main(String[] args) {
    
    		SparkSession spark = SparkSession.builder().master("local[2]")
    				.appName("SparkHive")
    				.config("spark.sql.warehouse.dir", "/user/hive/warehouse/").enableHiveSupport()
    				.getOrCreate();
    		
    		// spark.sql.warehouse.dir should match Hive's hive.metastore.warehouse.dir path
    		spark.sql("show databases").show();
    		spark.sql("show tables").show();
    		spark.sql("use db_hive_edu");
    		Dataset<Row> data = spark
    				.sql("select hc_storetypeid as typeid,count(hc_storetypeid) as kczs from db_hive_edu.hc_casewoodlist where hc_wpstate=2 and hc_storetypeid !='null' group by hc_storetypeid order by hc_storetypeid");
    		data.show();
    		
    		// Connection settings for the target PostgreSQL database
    		String url = "jdbc:postgresql://192.168.174.200:5432/postgres?charSet=utf-8";
    		Properties connectionProperties = new Properties();
    		connectionProperties.put("user","postgres");
    		connectionProperties.put("password","postgres");
    		connectionProperties.put("driver","org.postgresql.Driver");
    		
    		// Save the result to the target table, overwriting any existing rows
    		data.write().mode(SaveMode.Overwrite).jdbc(url, "kczyqktj", connectionProperties);
    
    	}
    }
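
    To confirm that the write succeeded, the same JDBC settings can be used to read the table back with Spark. The following is a minimal sketch, not part of the original post: the class name VerifyApp is hypothetical, while the URL, table name kczyqktj, and connection properties mirror the example above.

    package com.gm.hive.SparkHive;

    import org.apache.spark.sql.Dataset;
    import org.apache.spark.sql.Row;
    import org.apache.spark.sql.SparkSession;
    import java.util.Properties;

    /**
     * Hypothetical verification step: read the kczyqktj table back from
     * PostgreSQL and print it, reusing the settings from the example above.
     */
    public class VerifyApp {
    	public static void main(String[] args) {

    		SparkSession spark = SparkSession.builder().master("local[2]")
    				.appName("VerifyJdbcWrite")
    				.getOrCreate();

    		// Same connection details as in the write example
    		String url = "jdbc:postgresql://192.168.174.200:5432/postgres?charSet=utf-8";
    		Properties connectionProperties = new Properties();
    		connectionProperties.put("user", "postgres");
    		connectionProperties.put("password", "postgres");
    		connectionProperties.put("driver", "org.postgresql.Driver");

    		// DataFrameReader.jdbc(url, table, properties) loads the table as a Dataset<Row>
    		Dataset<Row> saved = spark.read().jdbc(url, "kczyqktj", connectionProperties);
    		saved.show();

    		spark.stop();
    	}
    }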

  • Original post: https://www.cnblogs.com/gmhappy/p/9472435.html