Spark Streaming: writing output data to MySQL with foreachPartition

import java.sql.DriverManager

import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

object foreachTOMysql {

  /**
   * Counts the first comma-separated field of lines arriving on a socket over a
   * 60-second window sliding every 10 seconds, keeps the top 3 items, and writes
   * each result batch to MySQL using one JDBC connection per RDD partition.
   *
   * Blocks until the streaming context is terminated externally.
   */
  def writeToMysql(): Unit = {
    val conf = new SparkConf().setMaster("local[*]").setAppName("MYsql")
    val ssc = new StreamingContext(conf, Seconds(5))

    val itemStream = ssc.socketTextStream("192.168.59.100", 8888)
    // Key on the first CSV field; each occurrence counts as 1.
    val itemPairs = itemStream.map(line => (line.split(",")(0), 1))

    // 60s window, 10s slide.
    val itemCount = itemPairs.reduceByKeyAndWindow((v1: Int, v2: Int) => v1 + v2, Seconds(60), Seconds(10))

    // Swap to (count, item) so sortByKey orders by count descending, then swap back.
    val hottestWord = itemCount.transform { itemRDD =>
      val top3 = itemRDD
        .map { case (item, count) => (count, item) }
        .sortByKey(ascending = false)
        .map { case (count, item) => (item, count) }
        .take(3)
      ssc.sparkContext.makeRDD(top3)
    }

    hottestWord.foreachRDD { rdd =>
      // One connection per partition: created on the executor, so nothing
      // non-serializable crosses the driver/executor boundary.
      rdd.foreachPartition { partitionOfRecords =>
        val url = "jdbc:mysql://192.168.59.100:3306/spark"
        val username = "root"
        val password = "123456"
        Class.forName("com.mysql.jdbc.Driver")
        val conn = DriverManager.getConnection(url, username, password)
        try {
          // Disable autocommit BEFORE any statement so the whole batch is one transaction.
          conn.setAutoCommit(false)
          // NOTE(review): table/column names assumed — the original left the SQL
          // text empty ("" placeholders); confirm against the actual schema.
          val stmt = conn.prepareStatement(
            "INSERT INTO hottest_item (item, cnt) VALUES (?, ?)")
          try {
            partitionOfRecords.foreach { case (item, count) =>
              stmt.setString(1, item)
              stmt.setInt(2, count)
              stmt.addBatch()
            }
            stmt.executeBatch()
            conn.commit()
          } finally {
            stmt.close()
          }
        } catch {
          case e: Exception =>
            // Undo the partial batch, then let Spark's task retry machinery see the failure.
            conn.rollback()
            throw e
        } finally {
          conn.close() // always release the connection — the original leaked it
        }
      }
    }

    ssc.start()
    // Blocks until stopped externally; the original's ssc.stop() after this
    // line was unreachable in normal operation and has been removed.
    ssc.awaitTermination()
  }
}

Source: blog.csdn.net/qq_38704184/article/details/94572644