  • Spark: converting an RDD to a DataFrame

    import org.apache.spark.sql.{Row, SparkSession}
    import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
    
    case class Person(name: String, age: Int)

    /** RDD to DataFrame */
    object DataFrameRDD {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("")
          .master("local")
          .getOrCreate()

        val rdd = spark.sparkContext.textFile("person.text")

        /**
         * 1. RDD to DataFrame via case-class reflection.
         * This import enables implicitly converting an RDD to a DataFrame;
         * "spark" here is not a package but the SparkSession declared above.
         */
        import spark.implicits._
        val df = rdd.map(_.split(","))
          .map(attributes => Person(attributes(0), attributes(1).trim.toInt))
          .toDF()
        // Register a temporary view
        df.createOrReplaceTempView("person")
        val frame = spark.sql("select * from person where age > 20")
        frame.map(t => "name:" + t(0) + "," + "age:" + t(1)).show()

        /**
         * 2. RDD to DataFrame via an explicit schema.
         */
        // Build the schema (the "table header")
        val fields = Array(StructField("name", StringType, true), StructField("age", IntegerType, true))
        val schema = StructType(fields)
        // The rows of the table
        val rowRdd = rdd.map(_.split(","))
          .map(attributes => Row(attributes(0), attributes(1).trim.toInt))
        // Attach the schema to the rows
        val dataframe = spark.createDataFrame(rowRdd, schema)
        // Register a temporary view
        dataframe.createOrReplaceTempView("person")
        val dataframe2 = spark.sql("select * from person where age > 20")
        dataframe2.map(t => "name:" + t(0) + "," + "age:" + t(1)).show()
      }
    }
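    To try the reflection-based conversion without a person.text file on disk, the sketch below (an illustrative variant, not part of the original post) builds the input RDD from an in-memory Seq via parallelize; the sample records and the DataFrameRDDDemo object name are assumptions, and the Person case class defined above is reused.

    import org.apache.spark.sql.SparkSession

    object DataFrameRDDDemo {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("DataFrameRDDDemo")
          .master("local")
          .getOrCreate()
        import spark.implicits._

        // Hypothetical in-memory records standing in for person.text ("name,age" lines)
        val lines = Seq("Michael,29", "Andy,30", "Justin,19")
        val rdd = spark.sparkContext.parallelize(lines)

        // Same reflection-based conversion as above, reusing the Person case class
        val df = rdd.map(_.split(","))
          .map(a => Person(a(0), a(1).trim.toInt))
          .toDF()
        df.createOrReplaceTempView("person")
        spark.sql("select * from person where age > 20").show()

        spark.stop()
      }
    }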
  • Original post: https://www.cnblogs.com/zxbdboke/p/12749542.html