zoukankan      html  css  js  c++  java
  • Kudu Native RDD

    Spark与Kudu的集成同时提供了kudu RDD

    import org.apache.kudu.spark.kudu.KuduContext
    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.{Row, SparkSession}
    
    /**
      * Example: reading a Kudu table as a native Spark `RDD[Row]` via `KuduContext.kuduRDD`,
      * projecting two columns, mapping rows to tuples, and printing them on the driver.
      *
      * Requires reachable Kudu masters (see `kuduMasters`) and an existing table
      * `spark_kudu_tbl` with at least the columns `name: String` and `age: Int`.
      */
    object KuduNativeRDD {
      def main(args: Array[String]): Unit = {
        val sparkConf = new SparkConf().setAppName("AcctfileProcess")
          // Run locally; the remaining settings tune timeouts and resource limits.
          .setMaster("local")
          .set("spark.worker.timeout", "500")
          .set("spark.cores.max", "10")
          .set("spark.rpc.askTimeout", "600s")
          .set("spark.network.timeout", "600s")
          .set("spark.task.maxFailures", "1")
          // FIX: key was misspelled "spark.speculationfalse", which Spark silently ignores;
          // the real key that disables speculative task execution is "spark.speculation".
          .set("spark.speculation", "false")
          .set("spark.driver.allowMultipleContexts", "true")
          .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
        val sparkContext = SparkContext.getOrCreate(sparkConf)
        val sqlContext = SparkSession.builder().config(sparkConf).getOrCreate().sqlContext
        // Comma-separated host:port list of the Kudu master nodes.
        val kuduMasters = "hadoop01:7051,hadoop02:7051,hadoop03:7051"
        val kuduContext = new KuduContext(kuduMasters, sqlContext.sparkContext)
        // 1: the Kudu table to read.
        val kuduTableName = "spark_kudu_tbl"
        // 2: projection — fetch only the columns we need.
        val kuduTableProjColumns = Seq("name", "age")

        // 3: scan the table into an RDD[Row] with the projected columns.
        val custRDD = kuduContext.kuduRDD(sparkContext, kuduTableName, kuduTableProjColumns)

        // 4: deconstruct each Row into a (name, age) tuple.
        val custTuple = custRDD.map {
          case Row(name: String, age: Int) => (name, age)
        }
        // 5: collect to the driver and print (fine for a demo; avoid on large tables).
        custTuple.collect().foreach(println(_))
      }
    }
  • 相关阅读:
    第三章-套接字编程
    unix网络编程第四章----基于TCP套接字编程
    unix网络编程第一章demo
    unix网络编程第2章
    论epoll的实现
    Select函数实现
    函数式编程语言
    Git
    python库-urllib
    PIL处理图片信息
  • 原文地址:https://www.cnblogs.com/niutao/p/10555410.html
Copyright © 2011-2022 走看看