Spark Streaming: Graceful Shutdown
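The idea, as described in the class comment below: use an external file system (HDFS) to control shutdown from outside the running job. A monitor thread polls for a marker directory, and when the marker disappears it stops the StreamingContext gracefully, letting the batch currently being processed finish first.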

package com.atguigu.day09

import java.net.URI

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Seconds, StreamingContext}

/**
  * @ClassName: generalclose
  * @Description: Graceful shutdown. An external file system (HDFS) is used to
  * control shutdown from outside the program: the current batch is allowed to
  * finish before the StreamingContext is stopped.
  * @Author: kele
  * @Date: 2021/2/19 13:44
  **/
object generalclose {

  def main(args: Array[String]): Unit = {

    // 1. Create the StreamingContext and enable graceful shutdown via
    //    "spark.streaming.stopGracefullyOnShutdown" = "true"
    val ssc = new StreamingContext(
      new SparkConf()
        .setMaster("local[4]")
        .setAppName("closegeneral")
        .set("spark.streaming.stopGracefullyOnShutdown", "true"),
      Seconds(5))

    ssc.sparkContext.setLogLevel("ERROR")

    // 2. Read data from the socket source
    val ds = ssc.socketTextStream("hadoop102", 9999)

    // 3. Processing: word count over each batch
    ds.flatMap(_.split(" "))
      .map((_,1))
      .reduceByKey(_+_)
      .print()

    // 4. Start the streaming computation
    ssc.start()

    // 5. Monitor thread: checks an external condition and stops ssc when it holds
    new Thread() {
      override def run(): Unit = {

        // Connect to HDFS as user atguigu
        val fs = FileSystem.get(new URI("hdfs://hadoop102:8020"), new Configuration(), "atguigu")

        // breakable/break come from the Scala standard library
        import scala.util.control.Breaks._

        breakable {
          while (true) {
            // If the ooo directory no longer exists on HDFS, stop ssc
            if (!fs.exists(new Path("hdfs://hadoop102:8020/input/ooo"))) {
              // Graceful stop: also stop the SparkContext, and let the
              // current batch finish before shutting down
              ssc.stop(stopSparkContext = true, stopGracefully = true)
              break()
            }
            // Poll periodically instead of spinning in a tight loop
            Thread.sleep(5000)
          }

        }
      }
      // Start the shutdown-monitor thread
    }.start()

    // 6. Block the main thread until the streaming context terminates
    ssc.awaitTermination()
  }
}
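
Because the monitor thread stops ssc as soon as /input/ooo disappears, the marker directory must exist on HDFS before the job starts, and deleting it is what triggers the graceful stop. Below is a minimal companion sketch for managing the marker from a separate process; the MarkerAdmin object and its create/delete arguments are my own illustration, not part of the original post.

package com.atguigu.day09

import java.net.URI

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}

object MarkerAdmin {

  def main(args: Array[String]): Unit = {

    // Same HDFS URI and user as in generalclose above
    val fs = FileSystem.get(new URI("hdfs://hadoop102:8020"), new Configuration(), "atguigu")
    val marker = new Path("/input/ooo")

    args.headOption match {
      // Marker present => the streaming job keeps running
      case Some("create") => fs.mkdirs(marker)
      // Marker gone => the monitor thread stops ssc gracefully
      case _ => fs.delete(marker, true)
    }

    fs.close()
  }
}

To try it out: create the marker before submitting the streaming job, feed input with nc -lk 9999 on hadoop102, then delete the marker; the job finishes the batch it is currently processing and shuts down cleanly.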

Reposted from blog.csdn.net/qq_38705144/article/details/113866919