Spark SQL CSV data source

package com.ws.jdbc

import org.apache.spark.sql.{DataFrame, SparkSession}
object CsvSource {

  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder().appName("CsvSource").master("local[*]").getOrCreate()

    // Read the CSV data, specifying a file or directory path; with no options, every column is typed as String
    val data: DataFrame = sparkSession.read.csv("E:\\bigData\\testdata\\move.csv")

    // Rename the default columns (_c0, _c1, _c2) to meaningful names
    val dataFrame = data.toDF("id", "age", "score")

    // Keep only the first 10 rows for a quick look
    val result = dataFrame.limit(10)
    result.printSchema()
    result.show()
    sparkSession.stop()
  }
}
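
Since the plain read.csv call types every column as String and names them _c0, _c1, ..., in practice you usually either ask Spark to infer types or declare a schema up front. The sketch below shows both approaches using the standard DataFrameReader options header and inferSchema and an explicit StructType; the object name CsvSourceWithOptions is hypothetical, the file path is carried over from the example above, and the chosen column types (Integer/Double) are assumptions, since the actual contents of move.csv are not shown in the post.

package com.ws.jdbc

import org.apache.spark.sql.types.{DoubleType, IntegerType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, SparkSession}

// Hypothetical variant of the example above, illustrating typed CSV reads
object CsvSourceWithOptions {

  def main(args: Array[String]): Unit = {
    val sparkSession = SparkSession.builder().appName("CsvSourceWithOptions").master("local[*]").getOrCreate()

    // Approach 1: treat the first line as column names (if the file has a header)
    // and let Spark sample the data to infer column types, at the cost of an extra pass
    val inferred: DataFrame = sparkSession.read
      .option("header", "true")
      .option("inferSchema", "true")
      .csv("E:\\bigData\\testdata\\move.csv")
    inferred.printSchema()

    // Approach 2: declare the schema explicitly, which skips inference entirely
    // (the types here are assumed for illustration)
    val schema = StructType(Seq(
      StructField("id", IntegerType),
      StructField("age", IntegerType),
      StructField("score", DoubleType)
    ))
    val typed: DataFrame = sparkSession.read
      .schema(schema)
      .csv("E:\\bigData\\testdata\\move.csv")
    typed.printSchema()

    sparkSession.stop()
  }
}

The explicit-schema approach is generally preferable on large inputs, since inferSchema has to scan the data once just to guess the types.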

Reprinted from blog.csdn.net/bb23417274/article/details/82955589