Writing a Spark JDBC read with JdbcRDD
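This example reads rows from a MySQL table into an RDD using the classic JdbcRDD API. The MySQL JDBC driver must be on the classpath; a minimal sbt sketch is below (the artifact versions are illustrative assumptions, match them to your environment):

// build.sbt -- library versions here are illustrative assumptions
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "2.4.8",
  "mysql" % "mysql-connector-java" % "5.1.49"
)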

package Spark_day03

import java.sql.DriverManager

import org.apache.spark.rdd.JdbcRDD
import org.apache.spark.{SparkConf, SparkContext}

object JdbcRDD1 {

  def main(args: Array[String]): Unit = {

    val sparkConf = new SparkConf().setMaster("local").setAppName("jdbcRDD")
    val sc = new SparkContext(sparkConf)

    // JDBC URL for the database (the encoding parameters were missing their values)
    val dbUrl = "jdbc:mysql://10.0.154.201:3306/test?useUnicode=true&characterEncoding=UTF-8"
    val user = "root"
    val password = "root"

    // Query for the desired columns. JdbcRDD requires exactly two '?' placeholders,
    // which it fills with each partition's lower and upper id bound; the bounds are
    // inclusive, so use >= / <= rather than > / < to avoid dropping the edge rows
    val sql = "select id, name from student where id >= ? and id <= ?"
    // Connection factory: called on the executors to open a JDBC connection
    val conn = () => {
      Class.forName("com.mysql.jdbc.Driver")
      DriverManager.getConnection(dbUrl, user, password)
    }

    // Arguments: lower bound, upper bound, number of partitions, and a row mapper
    val jdbcRdd = new JdbcRDD(sc, conn, sql, 1, 100, 1, res => {
      val id = res.getInt("id")
      val name = res.getString("name")
      (id, name)
    })

    jdbcRdd.foreach(println)
    sc.stop()

  }
}
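For comparison, current Spark code usually reads JDBC tables through the DataFrame source rather than JdbcRDD. A minimal sketch against the same student table (connection details copied from above; the choice of id as the partition column is an assumption):

import org.apache.spark.sql.SparkSession

object JdbcDataFrame1 {

  def main(args: Array[String]): Unit = {

    val spark = SparkSession.builder().master("local").appName("jdbcDF").getOrCreate()

    // Split the id range 1..100 across 4 partitions, mirroring the bounds above
    val df = spark.read.format("jdbc")
      .option("url", "jdbc:mysql://10.0.154.201:3306/test?useUnicode=true&characterEncoding=UTF-8")
      .option("dbtable", "student")
      .option("user", "root")
      .option("password", "root")
      .option("partitionColumn", "id") // assumption: id is a numeric, indexed column
      .option("lowerBound", "1")
      .option("upperBound", "100")
      .option("numPartitions", "4")
      .load()

    df.select("id", "name").show()
    spark.stop()
  }
}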


Reposted from blog.csdn.net/qq_41212491/article/details/87921080