1. Explanation from the docs (https://spark.apache.org/docs/2.1.0/api/java/org/apache/spark/sql/Column.html)
df("columnName") // On a specific DataFrame.
col("columnName") // A generic column no yet associated with a DataFrame.
col("columnName.field") // Extracting a struct field
col("`a.column.with.dots`") // Escape `.` in column names.
$"columnName" // Scala short hand for a named column.
expr("a + 1") // A column that is constructed from a parsed SQL Expression.
lit("abc") // A column that produces a literal (constant) value.
2. Packages needed when using Column
import spark.implicits._                  // enables the $"columnName" syntax
import org.apache.spark.sql.functions._   // col, column, expr, lit, ...
import org.apache.spark.sql.Column        // the Column type itself
3. Examples
scala> val idCol = $"id"
idCol: org.apache.spark.sql.ColumnName = id
scala> val idCol = col("id")
idCol: org.apache.spark.sql.Column = id
scala> val idCol = column("id")
idCol: org.apache.spark.sql.Column = id
scala> val dataset = spark.range(5).toDF("text")
dataset: org.apache.spark.sql.DataFrame = [text: bigint]
scala> val textCol = dataset.col("text")
textCol: org.apache.spark.sql.Column = text
scala> val textCol = dataset.apply("text")
textCol: org.apache.spark.sql.Column = text
scala> val textCol = dataset("text")
textCol: org.apache.spark.sql.Column = text
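Continuing the same session, the Column obtained above can be passed to the usual operators such as select and filter (a rough sketch; the tables are the output of show in spark-shell):

scala> dataset.select(textCol).show()
+----+
|text|
+----+
|   0|
|   1|
|   2|
|   3|
|   4|
+----+

scala> dataset.filter(textCol > 2).show()
+----+
|text|
+----+
|   3|
|   4|
+----+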