Spark SQL reads and writes data to MySQL
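
Spark SQL talks to MySQL through its built-in JDBC data source, so the MySQL JDBC driver (Connector/J) must be on the classpath when the job runs. Below is a minimal sbt dependency sketch; the exact Spark and Connector/J versions are assumptions, so match them to your own environment (the class com.mysql.jdbc.Driver used in the examples belongs to Connector/J 5.x).

// build.sbt (sketch) -- versions are assumptions, adjust to your environment
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-sql" % "2.4.8",
  "mysql" % "mysql-connector-java" % "5.1.49"
)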

Read data

Method one

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

val conf: SparkConf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local[*]")
val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

// Pass each JDBC connection parameter with a separate .option() call
spark.read
  .format("jdbc")
  .option("driver", "com.mysql.jdbc.Driver")
  .option("url", "jdbc:mysql://localhost:3306/student")
  .option("user", "root")
  .option("password", "root")
  .option("dbtable", "stuinfo")
  .load()
  .show()

spark.stop()

Method two

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

val conf: SparkConf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local[*]")
val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

// Pass all JDBC connection parameters at once as a Map; user and password ride on the URL
spark.read
  .format("jdbc")
  .options(Map(
    "driver" -> "com.mysql.jdbc.Driver",
    "url" -> "jdbc:mysql://localhost:3306/student?user=root&password=root",
    "dbtable" -> "stuinfo"
  ))
  .load()
  .show()

spark.stop()

Method three

import java.util.Properties
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

val conf: SparkConf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local[*]")
val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()

// Use the read.jdbc(url, table, properties) shortcut; extra options go into a Properties object
val prop: Properties = new Properties()
prop.setProperty("driver", "com.mysql.jdbc.Driver")

spark.read.jdbc(
  "jdbc:mysql://localhost:3306/student?user=root&password=root",
  "stuinfo",
  prop
).show()

spark.stop()
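
Whichever variant is used, load() returns an ordinary DataFrame, so the table can be queried with SQL after registering a temporary view. A minimal sketch, assuming stuinfo has the sname and gender columns used in the write examples below:

// Sketch: query the MySQL-backed DataFrame with Spark SQL.
// Assumes stuinfo has sname and gender columns (see the write examples).
val stuDF = spark.read
  .format("jdbc")
  .option("driver", "com.mysql.jdbc.Driver")
  .option("url", "jdbc:mysql://localhost:3306/student")
  .option("user", "root")
  .option("password", "root")
  .option("dbtable", "stuinfo")
  .load()

stuDF.createOrReplaceTempView("stuinfo")
spark.sql("SELECT sname, gender FROM stuinfo WHERE gender = '男'").show()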

Write data

Method one

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

val conf: SparkConf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local[*]")
val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
import spark.implicits._

// Build a DataFrame from an RDD of (name, gender) pairs
val rdd: RDD[(String, String)] = spark.sparkContext.makeRDD(List(("乔峰", "男"), ("段誉", "男"), ("虚竹", "男")))
val df: DataFrame = rdd.toDF("sname", "gender")

// Append the rows to the stuinfo table, passing each JDBC parameter with .option()
df.write
  .format("jdbc")
  .option("driver", "com.mysql.jdbc.Driver")
  .option("url", "jdbc:mysql://localhost:3306/student")
  .option("user", "root")
  .option("password", "root")
  .option("dbtable", "stuinfo")
  .mode(SaveMode.Append)
  .save()

spark.stop()

Method two

import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

val conf: SparkConf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local[*]")
val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
import spark.implicits._

val rdd: RDD[(String, String)] = spark.sparkContext.makeRDD(List(("乔峰", "男"), ("段誉", "男"), ("虚竹", "男")))
val df: DataFrame = rdd.toDF("sname", "gender")

// Append the rows, passing all JDBC parameters at once as a Map
df.write
  .format("jdbc")
  .options(Map(
    "driver" -> "com.mysql.jdbc.Driver",
    "url" -> "jdbc:mysql://localhost:3306/student?user=root&password=root",
    "dbtable" -> "stuinfo"
  ))
  .mode(SaveMode.Append)
  .save()

spark.stop()

Method three

import java.util.Properties
import org.apache.spark.SparkConf
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}

val conf: SparkConf = new SparkConf().setAppName(this.getClass.getSimpleName).setMaster("local[*]")
val spark: SparkSession = SparkSession.builder().config(conf).getOrCreate()
import spark.implicits._

val rdd: RDD[(String, String)] = spark.sparkContext.makeRDD(List(("乔峰", "男"), ("段誉", "男"), ("虚竹", "男")))
val df: DataFrame = rdd.toDF("sname", "gender")

// Append the rows with the write.jdbc(url, table, properties) shortcut
val prop: Properties = new Properties()
prop.setProperty("driver", "com.mysql.jdbc.Driver")
df.write.mode(SaveMode.Append).jdbc(
  "jdbc:mysql://localhost:3306/student?user=root&password=root",
  "stuinfo",
  prop
)

spark.stop()
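
All three write variants use SaveMode.Append, which adds the new rows to the existing stuinfo table. The JDBC writer also accepts SaveMode.Overwrite (replace the table contents), SaveMode.ErrorIfExists (the default, which fails if the table already exists) and SaveMode.Ignore (skip the write if the table already exists).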
