1. Add Kafka Connector dependency
Add the following dependencies to pom.xml:
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_${scala.binary.version}</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>1.2.75</version>
</dependency>
2. Start the Kafka cluster
start zookeeper
./bin/zookeeper-server-start.sh config/zookeeper.properties
start kafka
./bin/kafka-server-start.sh config/server.properties
start a consumer
./bin/kafka-console-consumer.sh --bootstrap-server hadoop100:9092 --topic topic_sensor
3. From Flink sink to Kafka
package com.lyh.flink06;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.ConnectedStreams;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.SinkFunction;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
/**
 * Minimal example of writing a Flink DataStream to Kafka.
 *
 * <p>Publishes two demo strings to the topic {@code topic_sensor} on the broker
 * {@code hadoop100:9092} using {@link SimpleStringSchema} for serialization.
 * Run the console consumer (see step 2 above) to observe the records.
 */
public class SinkToKafka {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(2);

        // Demo records to publish.
        DataStreamSource<String> dataStreamSource =
                env.fromElements("a-----------------------------", "b*****************************");

        // Sink the String stream straight to Kafka. The original version connected
        // this stream to an unrelated Integer stream and then called
        // getFirstInput(), which simply returned this same stream — that detour
        // (and the unused Integer source) has been removed.
        dataStreamSource.addSink(
                new FlinkKafkaProducer<String>("hadoop100:9092", "topic_sensor", new SimpleStringSchema()));

        env.execute();
    }
}
After running the program, you can see in the console consumer that the two messages have been successfully consumed.