Connecting Spark to a MySQL database

package com.menkoudai.loan.data.spark.scala

import java.util.Properties
import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

object MysqlUtils {
  def main(args: Array[String]): Unit = {
    // val instead of var: the SparkConf reference is never reassigned
    val conf = new SparkConf().setAppName("mysqlTest").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // First approach: read the table via jdbc() with a java.util.Properties object
    val url = "jdbc:mysql://localhost:3306/test?characterEncoding=UTF-8"
    val username = "root"
    val password = "mysql1234"
    val prop = new Properties()
    prop.put("user", username)
    prop.put("password", password)
    val df: DataFrame = sqlContext.read.jdbc(url, "user_info", prop)
    // Collect the selected columns to the driver and print username, age, email, address
    // (index 0 is the id column, which is not printed here)
    df.select("id", "username", "age", "email", "address").collect().foreach(row => {
      println((row.get(1), row.get(2), row.get(3), row.get(4)))
    })
    //df.show()

    /*
    // Second approach: read via format("jdbc") with option() calls
    val jdbcDF = sqlContext.read
      .format("jdbc")
      .option("url", "jdbc:mysql://localhost:3306/test?characterEncoding=UTF-8")
      //.option("dbtable", "(select username, age, email, address from user_info) as user_info")
      .option("dbtable", "user_info")
      .option("user", "root")
      .option("password", "mysql1234")
      .load()
    jdbcDF.show()
    */

    sc.stop()
  }
}
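
For readers on Spark 2.x, here is a minimal sketch of the same read using the SparkSession entry point instead of the older SQLContext. The object name MysqlSessionExample is just an illustrative placeholder, and the MySQL JDBC driver (e.g. mysql-connector-java) is assumed to be on the classpath; the URL, table, and credentials are the same placeholders used above.

package com.menkoudai.loan.data.spark.scala

import org.apache.spark.sql.SparkSession

// A minimal sketch, assuming Spark 2.x and the MySQL JDBC driver on the classpath.
// Table name and credentials are the same placeholders used above.
object MysqlSessionExample {
  def main(args: Array[String]): Unit = {
    // The builder replaces the SparkConf / SparkContext / SQLContext trio
    val spark = SparkSession.builder()
      .appName("mysqlTest")
      .master("local")
      .getOrCreate()

    // Same option() keys as the second approach above: url, dbtable, user, password
    val jdbcDF = spark.read
      .format("jdbc")
      .option("url", "jdbc:mysql://localhost:3306/test?characterEncoding=UTF-8")
      .option("dbtable", "user_info")
      .option("user", "root")
      .option("password", "mysql1234")
      .load()

    jdbcDF.show()
    spark.stop()
  }
}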

Reposted from blog.csdn.net/niuchenliang524/article/details/80745949