How Spark SQL writes data to a file or a database, and how to query data through the API

package com.sparksql

import java.util.Properties

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SparkSession}

//Case class for one record of people.txt; it must be defined at the top level
//(not inside main) so the toDF() implicits can find a TypeTag for it
case class People(name: String, age: Int)

object DataFrameFunction {

  def main(args: Array[String]): Unit = {

    //Build the SparkSession, the entry point to Spark SQL
    val spark: SparkSession = SparkSession
      .builder()
      .master("local")
      .appName("DataFrameFunction")
      .getOrCreate()

    //Read the raw text file into an RDD of lines
    val lineRDD: RDD[String] = spark.sparkContext.textFile("C:\\people.txt")

    //Parse each comma-separated line into a People instance
    val peopleRDD: RDD[People] = lineRDD.map(line => {
      val linearray: Array[String] = line.split(",")
      People(linearray(0), linearray(1).trim.toInt)
    })

    //Implicits needed for the RDD-to-DataFrame conversion below
    import spark.implicits._
    val peopleDF: DataFrame = peopleRDD.toDF()

    //DataFrame API style of analysis (methods such as select(), where(), etc.)
    //Print the schema, i.e. the DataFrame's metadata
    //peopleDF.printSchema()

    //SQL equivalent: select name from people where age > 25
    //peopleDF.select("name").where(peopleDF.col("age") > 25).show()

    //SQL style of analysis: register a temp view, then query it with spark.sql()
    //peopleDF.createOrReplaceTempView("people6")
    //val resultDF: DataFrame = spark.sql("select name from people6 where age > 25")
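    //resultDF.show() //uncomment together with the two lines above to print the result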

    //Save the result as plain text (text() requires a single string column,
    //which holds here because only "name" was selected)
    //resultDF.write.text("sqltext_result")
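
    //A hedged alternative sketch (the output paths below are assumptions, not
    //from the original post): the CSV and Parquet writers also accept
    //multi-column DataFrames, and SaveMode controls what happens when the
    //output directory already exists. Requires: import org.apache.spark.sql.SaveMode
    //resultDF.write.mode(SaveMode.Overwrite).csv("sqlcsv_result")
    //resultDF.write.mode(SaveMode.Overwrite).parquet("sqlparquet_result")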

    //Save the result to MySQL via JDBC (requires the MySQL JDBC driver on the classpath)
    /*val properties = new Properties()
    properties.setProperty("user", "root")
    properties.setProperty("password", "")
    resultDF.write.jdbc("jdbc:mysql://localhost:3306/mydb", "people6", properties)*/
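
    //A minimal read-back sketch to verify the JDBC write, assuming the same
    //database, table, and credentials as above:
    /*val readBackDF: DataFrame = spark.read
      .jdbc("jdbc:mysql://localhost:3306/mydb", "people6", properties)
    readBackDF.show()*/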

    spark.stop()

  }

}
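
For reference, a minimal people.txt that matches the split(",") parsing above might look like this (sample data, not from the original post):

Michael, 29
Andy, 30
Justin, 19

With that input, both the DataFrame query and the spark.sql() query keep only the rows with age > 25, so show() would print something like:

+-------+
|   name|
+-------+
|Michael|
|   Andy|
+-------+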

Reposted from blog.csdn.net/dudadudadd/article/details/113868653