194. Spark 2.0 Dataset Development in Detail: typed operations
2019-02-12
ZFH__ZJ
Code

The example below reads employee.json into a strongly typed Dataset[Employee] and sorts it by salary in descending order, then by age in ascending order.
    import org.apache.spark.sql.SparkSession

    object TypedOperation {

      case class Employee(name: String, age: Long, depId: Long, gender: String, salary: Long)

      def main(args: Array[String]): Unit = {
        val sparkSession = SparkSession
          .builder()
          .appName("BasicOperation")
          .master("local")
          .getOrCreate()

        import sparkSession.implicits._
        import org.apache.spark.sql.functions._

        // Load employee.json from the classpath resources
        val employeePath = this.getClass.getClassLoader.getResource("employee.json").getPath
        val employeeDF = sparkSession.read.json(employeePath)

        // Convert the untyped DataFrame into a strongly typed Dataset[Employee]
        val employeeDS = employeeDF.as[Employee]

        // Sort by salary descending, then by age ascending, and print the result
        employeeDS.sort(employeeDS("salary").desc, employeeDS("age").asc).show()
      }
    }
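Because employeeDS is a Dataset[Employee], the result of sort keeps the Employee element type, so it can be collected back into case-class instances rather than generic Row objects. Below is a minimal sketch of that usage, meant to be placed inside main after employeeDS is defined; the value name topEarners and the limit of 3 are arbitrary choices for illustration, not part of the original code.

        // Sketch only: reuses the employeeDS value defined in main above.
        // The sorted Dataset keeps its Employee element type, so collect()
        // returns Array[Employee] instead of Array[Row].
        val topEarners: Array[Employee] =
          employeeDS.sort(employeeDS("salary").desc).limit(3).collect()

        topEarners.foreach(e => println(s"${e.name} earns ${e.salary}"))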