Two ways Spark converts an RDD into a DataFrame

This post introduces the two ways Spark converts an RDD into a DataFrame.
1. Using a case class. Note that in Scala 2.10 a case class supports at most 22 fields, so this approach cannot handle wider schemas.
2. Using Spark's StructType, which attaches an explicit schema to an ordinary RDD of Rows and turns it into a DataFrame. Once the data is a DataFrame, you can use Spark SQL to filter it and perform other operations.
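
Both examples below read a local text file of comma-separated name,age records. A minimal people.txt (hypothetical contents, but matching what the parsing code expects) might look like:

Michael, 29
Andy, 30
Justin, 19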

The following code demonstrates both approaches.

package spark_rdd

import org.apache.spark._
import org.apache.spark.sql._
import org.apache.spark.sql.types._

object SparkRDDtoDF {
  
  // Use StructType to convert an RDD to a DataFrame
  def rddToDF(sparkSession : SparkSession):DataFrame = {
    //Set the schema structure
    val schema = StructType(
      Seq(
        StructField("name",StringType,true)          
        ,StructField("age",IntegerType,true)
      )
    )
    val rowRDD = sparkSession.sparkContext
      .textFile("file:/E:/scala_workspace/z_spark_study/people.txt",2)
      .map( x => x.split(",")).map( x => Row(x(0),x(1).trim().toInt))  
    sparkSession.createDataFrame(rowRDD,schema)
  }
  
  //use case class Person
  case class Person(name:String,age:Int)
  def rddToDFCase(sparkSession : SparkSession):DataFrame = {
    // Import implicits; otherwise the RDD cannot call the toDF method
    import sparkSession.implicits._
    val peopleDF = sparkSession.sparkContext
      .textFile("file:/E:/scala_workspace/z_spark_study/people.txt",2)
      .map( x => x.split(",")).map( x => Person(x(0),x(1).trim().toInt)).toDF()
    peopleDF
  }
  
  def main(args : Array[String]):Unit = {
      val conf = new SparkConf().setMaster("local[2]")
      conf.set("spark.sql.warehouse.dir","file:/E:/scala_workspace/z_spark_study/")
      conf.set("spark.sql.shuffle.partitions","20")
      val sparkSession = SparkSession.builder().appName("RDD to DataFrame")
            .config(conf).getOrCreate()
      // Set the Spark log4j level programmatically
      sparkSession.sparkContext.setLogLevel("WARN")
      import sparkSession.implicits._
      // Use the case class to convert the RDD to a DataFrame
      //val peopleDF = rddToDFCase(sparkSession)

      // Use StructType to convert the RDD to a DataFrame
      val peopleDF = rddToDF(sparkSession)
      peopleDF.show()
      peopleDF.select($"name",$"age").filter($"age">20).show()
      
  }
  
}
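
With the hypothetical people.txt shown above, peopleDF.show() would print something like:

+-------+---+
|   name|age|
+-------+---+
|Michael| 29|
|   Andy| 30|
| Justin| 19|
+-------+---+

The select($"name",$"age").filter($"age">20) query would then keep only Michael and Andy, since Justin's age is not greater than 20.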
