Two Ways to Convert an RDD to a DataFrame in Spark

This post introduces two ways to convert an RDD into a DataFrame in Spark:
1. Using a case class. Note that in Scala 2.10 a case class supports at most 22 fields, which is a limitation to keep in mind.
2. Using Spark's built-in StructType to turn a plain RDD into a DataFrame.
Once converted to a DataFrame, the data can be filtered and queried with Spark SQL (a minimal SQL sketch follows the code listing).

The code below speaks for itself.
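For reference, the code reads a local file people.txt and splits each line on a comma into a name and an age. The original post does not show the file, but it would need to look something like this (sample values are illustrative only):

Michael,29
Andy,30
Justin,19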

package spark_rdd

import org.apache.spark._
import org.apache.spark.sql._
import org.apache.spark.sql.types._

object SparkRDDtoDF {
  
  // Build the schema with StructType and convert the RDD to a DataFrame
  def rddToDF(sparkSession : SparkSession):DataFrame = {
    // Define the schema structure
    val schema = StructType(
      Seq(
        StructField("name",StringType,true)          
        ,StructField("age",IntegerType,true)
      )
    )
    val rowRDD = sparkSession.sparkContext
      .textFile("file:/E:/scala_workspace/z_spark_study/people.txt",2)
      .map( x => x.split(",")).map( x => Row(x(0),x(1).trim().toInt))  
    sparkSession.createDataFrame(rowRDD,schema)
  }
  
  //use case class Person
  case class Person(name:String,age:Int)
  def rddToDFCase(sparkSession : SparkSession):DataFrame = {
    // Import implicit conversions; otherwise toDF cannot be called on the RDD
    import sparkSession.implicits._
    val peopleDF = sparkSession.sparkContext
      .textFile("file:/E:/scala_workspace/z_spark_study/people.txt",2)
      .map( x => x.split(",")).map( x => Person(x(0),x(1).trim().toInt)).toDF()
    peopleDF
  }
  
  def main(args : Array[String]):Unit = {
    val conf = new SparkConf().setMaster("local[2]")
    conf.set("spark.sql.warehouse.dir","file:/E:/scala_workspace/z_spark_study/")
    conf.set("spark.sql.shuffle.partitions","20")
    val sparkSession = SparkSession.builder().appName("RDD to DataFrame")
      .config(conf).getOrCreate()
    // Set the Spark log4j log level programmatically
    sparkSession.sparkContext.setLogLevel("WARN")
    import sparkSession.implicits._
    // Use the case class approach to convert the RDD to a DataFrame
    //val peopleDF = rddToDFCase(sparkSession)

    // Use the StructType approach to convert the RDD to a DataFrame
    val peopleDF = rddToDF(sparkSession)
    peopleDF.show()
    peopleDF.select($"name",$"age").filter($"age">20).show()
  }
  
}
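As noted in the introduction, once the RDD has been converted to a DataFrame it can also be queried with SQL itself rather than the DataFrame DSL. A minimal sketch, assuming Spark 2.x and the same peopleDF produced in main:

      // Register the DataFrame as a temporary view so it can be queried with SQL
      peopleDF.createOrReplaceTempView("people")
      // Equivalent to peopleDF.select($"name",$"age").filter($"age">20)
      sparkSession.sql("SELECT name, age FROM people WHERE age > 20").show()

createOrReplaceTempView scopes the view to the current SparkSession, so no Hive metastore is required.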


Reposted from zhao-rock.iteye.com/blog/2328161