创建Maven项目
- 配置pom.xml文件
<!-- 根据自己使用的版本进行修改版本号 -->
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-scala_2.11</artifactId>
<version>1.7.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-scala_2.11</artifactId>
<version>1.7.2</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka_2.11</artifactId>
<version>1.7.2</version>
</dependency>
- 实现scala Object实例
import java.util.Properties
import org.apache.flink.api.common.serialization.SimpleStringSchema
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.kafka.{
FlinkKafkaConsumer, FlinkKafkaProducer}
import org.apache.kafka.clients.consumer.ConsumerConfig
/**
 * Reads string records from the Kafka topic "senserin" and forwards them
 * unchanged to the Kafka topic "senserout" through a Flink streaming job.
 */
object SinkKafka {
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment

    val prop = new Properties()
    prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.**.**:9092")
    prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "flink-kafka-demo")
    // Bug fix: the key deserializer was set to StringSerializer (a serializer class);
    // consumer deserializer properties must name Deserializer implementations.
    prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer")
    // Start from the latest offset when no committed offset exists.
    prop.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest")

    // Consume from Kafka; SimpleStringSchema decodes each record as a UTF-8 string.
    val kafkaToStream: DataStream[String] =
      env.addSource(new FlinkKafkaConsumer[String]("senserin", new SimpleStringSchema(), prop))

    // Write the stream back out to Kafka.
    kafkaToStream.addSink(
      new FlinkKafkaProducer[String]("192.168.**.**:9092", "senserout", new SimpleStringSchema()))

    env.execute("kafkademo2")
  }
}
创建kafka Topic
#创建输入Topic:senserin
kafka-topics.sh --create --zookeeper 192.168.**.**:2181 --topic senserin --partitions 1 --replication-factor 1
#创建输出Topic:senserout
kafka-topics.sh --create --zookeeper 192.168.**.**:2181 --topic senserout --partitions 1 --replication-factor 1
启动生产者、消费者
#启动senserin的生产者
kafka-console-producer.sh --topic senserin --broker-list 192.168.**.**:9092
#启动senserout的消费者
kafka-console-consumer.sh --topic senserout --bootstrap-server 192.168.**.**:9092 --from-beginning
启动Flink Stream
从生产者输入数据
从消费者查看Flink传来的数据
这样就完成了Flink读取Kafka中的数据与Flink传输数据至Kafka中的操作