Example code
package com.spark

import org.apache.spark.sql.SparkSession

/**
 * Parquet file operations
 */
object ParquetAPP {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder()
      .appName("ParquetAPP")
      .master("local[2]")
      .getOrCreate()

    /**
     * spark.read.format("parquet").load() is the standard way to read a Parquet file
     */
    val userDF = spark.read.format("parquet").load("path")

    // Inspect the schema and contents of the DataFrame
    userDF.printSchema()
    userDF.show()

    // Select a single column and write it out in JSON format
    userDF.select("name").write.format("json").save("path")

    // load() without an explicit format defaults to Parquet
    spark.read.load("path").show()

    spark.stop()
  }
}
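Besides the format(...).load()/save() style shown above, DataFrameReader and DataFrameWriter also provide parquet()/json() shorthand methods. Below is a minimal sketch of the same flow using those shorthands; the object name ParquetShorthandApp is just illustrative, and "path" remains a placeholder as in the example above.

package com.spark

import org.apache.spark.sql.SparkSession

// Illustrative example only: shows the shorthand read/write API
object ParquetShorthandApp {

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("ParquetShorthandApp")
      .master("local[2]")
      .getOrCreate()

    // Shorthand for spark.read.format("parquet").load("path")
    val userDF = spark.read.parquet("path")

    // Shorthand for write.format("json").save("path")
    userDF.select("name").write.json("path")

    // load()/save() with no explicit format use spark.sql.sources.default,
    // which is "parquet" unless overridden
    spark.read.load("path").show()

    spark.stop()
  }
}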