// Read streaming data from Kafka (读取kafka数据)
// Build a local SparkSession (2 cores, 2 shuffle partitions to keep the demo small).
val spark: SparkSession = {
  val builder = SparkSession.builder()
  builder
    .appName(this.getClass.getSimpleName.stripSuffix("$"))
    .master("local[2]")
    .config("spark.sql.shuffle.partitions", "2")
    .getOrCreate()
}
import spark.implicits._
// Streaming source: subscribe to the "wordsTopic" Kafka topic.
val kafkaReader = spark.readStream
  .format("kafka")
  .option("kafka.bootstrap.servers", "node1.itcast.cn:9092")
  .option("subscribe", "wordsTopic")
val kafkaStreamDF: DataFrame = kafkaReader.load()
// Write the processed stream back to Kafka (消费kafka数据)
// Streaming sink: publish the ETL output to the "etlTopic" Kafka topic.
// NOTE(review): `etlStreamDF` is defined earlier in the file (not shown here).
val kafkaWriter = etlStreamDF.writeStream
  .queryName("query-state-etl")
  .outputMode(OutputMode.Append())
  // ProcessingTime(0): trigger each micro-batch as soon as the previous one finishes.
  .trigger(Trigger.ProcessingTime(0))
  .format("kafka")
  .option("kafka.bootstrap.servers", "node1.itcast.cn:9092")
  .option("topic", "etlTopic")
  // Checkpoint directory required for a Kafka sink (exactly-once bookkeeping).
  .option("checkpointLocation", "datas/ckpt-kafka/10001")
val query: StreamingQuery = kafkaWriter.start()