zoukankan      html  css  js  c++  java
  • Kudu Native RDD

    Spark与Kudu的集成同时提供了kudu RDD

    import org.apache.kudu.spark.kudu.KuduContext
    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.{Row, SparkSession}
    
    /**
      * Created by angel;
      */
    /**
      * Example entry point: reads a Kudu table through the native Kudu RDD API
      * and prints each row as a (name, age) tuple.
      *
      * Expects a Kudu cluster reachable at the hard-coded master addresses and
      * a table "spark_kudu_tbl" with at least the columns (name: STRING, age: INT).
      */
    object KuduNativeRDD {
      def main(args: Array[String]): Unit = {
        val sparkConf = new SparkConf().setAppName("AcctfileProcess")
          // Local master plus timeout/resource tuning for the demo job.
          .setMaster("local")
          .set("spark.worker.timeout", "500")
          .set("spark.cores.max", "10")
          .set("spark.rpc.askTimeout", "600s")
          .set("spark.network.timeout", "600s")
          .set("spark.task.maxFailures", "1")
          // FIX: the key was misspelled "spark.speculationfalse", so Spark
          // silently ignored it; the correct key is "spark.speculation".
          .set("spark.speculation", "false")
          .set("spark.driver.allowMultipleContexts", "true")
          .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        val sparkContext = SparkContext.getOrCreate(sparkConf)
        val sqlContext = SparkSession.builder().config(sparkConf).getOrCreate().sqlContext
        // Kudu master addresses (comma-separated host:port list).
        val kuduMasters = "hadoop01:7051,hadoop02:7051,hadoop03:7051"
        val kuduContext = new KuduContext(kuduMasters, sqlContext.sparkContext)
        // 1: the Kudu table to read.
        val kuduTableName = "spark_kudu_tbl"
        // 2: projection — only fetch these columns from Kudu.
        val kuduTableProjColumns = Seq("name", "age")

        // 3: scan the table as an RDD[Row].
        val custRDD = kuduContext.kuduRDD(sparkContext, kuduTableName, kuduTableProjColumns)

        // 4: deconstruct each Row into a (name, age) tuple.
        //    NOTE(review): rows whose columns are null or of another runtime type
        //    would fail this match with a MatchError — acceptable for a demo.
        val custTuple = custRDD.map {
          case Row(name: String, age: Int) => (name, age)
        }
        // 5: collect to the driver and print (demo-sized data only).
        custTuple.collect().foreach(println)
      }
    }
  • 相关阅读:
    ES6中对象新增方法
    ES6中字符串新增方法
    Laya 吐槽日志.
    汇编与反汇编工具
    Mac 软件下载地址
    红米手机 android4.4.4 root之路
    查看apk安装包信息
    文件搜索
    自动发表QQ空间说说
    批量格式化json
  • 原文地址:https://www.cnblogs.com/niutao/p/10555410.html
Copyright © 2011-2022 走看看