Spark: Reading Excel/CSV Files

Without further ado, straight to the code:

import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by ll on 2018/5/17.
  */
object SparkReadFileCsv {

  def main(args: Array[String]): Unit = {

    val localpath = "F:\\1.csv"
    val conf = new SparkConf()
    conf.setAppName("SparkReadFile")
    conf.setMaster("local")
    val sparkContext = new SparkContext(conf)
    val sqlContext = new SQLContext(sparkContext)
    // read the CSV file via the spark-csv data source
    val data: DataFrame = sqlContext.read.format("com.databricks.spark.csv")
      .option("header", "true") // "true" if the first CSV row is a header line, otherwise "false"
      .option("inferSchema", "true") // automatically infer each column's data type
      .load(localpath)
    data.show()

  }

}
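Note that the code above targets the Spark 1.x API, where CSV support comes from the external spark-csv package (declared in the pom.xml below) and queries go through SQLContext. On Spark 2.0 and later, CSV is a built-in data source and SparkSession replaces SQLContext. A minimal sketch of the equivalent read, assuming a Spark 2.x+ setup (object name SparkReadFileCsvV2 is just for illustration):

import org.apache.spark.sql.{DataFrame, SparkSession}

object SparkReadFileCsvV2 {

  def main(args: Array[String]): Unit = {

    val localpath = "F:\\1.csv"
    // SparkSession replaces the SparkContext/SQLContext pair in Spark 2.x+
    val spark = SparkSession.builder()
      .appName("SparkReadFile")
      .master("local")
      .getOrCreate()

    // csv is a built-in data source here; the com.databricks:spark-csv
    // dependency is no longer needed
    val data: DataFrame = spark.read
      .option("header", "true")      // first row is a header
      .option("inferSchema", "true") // infer column types automatically
      .csv(localpath)
    data.show()

    spark.stop()
  }
}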

pom.xml

<dependencies>

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.10</artifactId>
        <version>1.6.1</version>
    </dependency>

    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.10</artifactId>
        <version>1.6.1</version>
    </dependency>

    <dependency>
        <groupId>com.databricks</groupId>
        <artifactId>spark-csv_2.10</artifactId>
        <version>1.4.0</version>
        <scope>compile</scope>
    </dependency>

</dependencies>
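The title also mentions Excel. The spark-csv source above only parses CSV text; for a native .xlsx workbook, one commonly used option is the third-party spark-excel package (com.crealytics:spark-excel). A hedged sketch, assuming Spark 2.x+ with that package added to the pom.xml; exact option names vary between spark-excel releases, and the file path is hypothetical:

import org.apache.spark.sql.{DataFrame, SparkSession}

object SparkReadFileExcel {

  def main(args: Array[String]): Unit = {

    val localpath = "F:\\1.xlsx" // hypothetical path, mirroring the CSV example
    val spark = SparkSession.builder()
      .appName("SparkReadExcel")
      .master("local")
      .getOrCreate()

    // "com.crealytics.spark.excel" comes from the spark-excel package,
    // not from Spark itself; add com.crealytics:spark-excel as a dependency
    val data: DataFrame = spark.read
      .format("com.crealytics.spark.excel")
      .option("header", "true") // older spark-excel releases call this option "useHeader"
      .option("inferSchema", "true")
      .load(localpath)
    data.show()

    spark.stop()
  }
}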

Reposted from blog.csdn.net/qq_33283716/article/details/80423130