Spring Boot integration with Storm's Kafka spout

Integrating Storm into a Spring Boot application for real-time log stream computation first requires integrating a Kafka client, which is used to subscribe to the topic.

1. Configure the yml file

storm:
  props:
    topologyName: adLog
    topologyWorkers: 1
    numTasks: 1
    topologyMaxSpoutPending: 5000
    topologyMessageTimeoutSecs: 30
  spout:
    kafkaClientSpout:
      parallelismHint: 3
      groupId: stormTestGroupId
      topics:
        - ad-log
      bootstrapServers: 127.0.0.1:9092
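
Only the storm.spout.kafkaClientSpout block is bound by the class shown in step 2; the storm.props block would need its own binding before its values can be used when building the topology. A minimal sketch of such a binding, assuming a hypothetical StormProperties class whose fields simply mirror the YAML keys (not part of the original post):

import lombok.Getter;
import lombok.Setter;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;

@Getter
@Setter
@Configuration
@ConfigurationProperties(prefix = "storm.props")
public class StormProperties {

    // Values under storm.props in the YAML above.
    private String topologyName;
    private Integer topologyWorkers;
    private Integer numTasks;
    private Integer topologyMaxSpoutPending;
    private Integer topologyMessageTimeoutSecs;
}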
2. Configure the Kafka class files
import lombok.Getter;
import lombok.Setter;
import org.apache.storm.topology.IRichSpout;

@Setter
@Getter
public abstract class SpoutBuilder {

    /**
     * Parallelism hint for the topology: the initial number of executors (threads)
     * assigned to this component, i.e. the number of tasks that should be assigned
     * to execute this spout. Each task will run on a thread in a process somewhere
     * around the cluster.
     * {@link org.apache.storm.topology.TopologyBuilder#setSpout(String, IRichSpout, Number)}
     */
    private Integer parallelismHint = 5;

    private String id;

    abstract IRichSpout buildSpout();
}
 
import lombok.Getter;
import lombok.Setter;
import org.apache.storm.kafka.spout.KafkaSpout;
import org.apache.storm.kafka.spout.KafkaSpoutConfig;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Getter
@Setter
@Configuration
@ConfigurationProperties(prefix = "storm.spout.kafkaClientSpout")
public class KafkaClientSpoutBuilder extends SpoutBuilder {

    private String bootstrapServers;
    private String[] topics;
    private String groupId;

    @Bean("kafkaClientSpout")
    public KafkaSpout<String, String> buildSpout() {
        super.setId("kafkaClientSpout");
        KafkaSpoutConfig<String, String> kafkaSpoutConfig = getKafkaSpoutConfig();
        return new KafkaSpout<>(kafkaSpoutConfig);
    }

    private KafkaSpoutConfig<String, String> getKafkaSpoutConfig() {
        KafkaSpoutConfig.Builder<String, String> builder = KafkaSpoutConfig.builder(bootstrapServers, topics);
        builder.setGroupId(groupId);
        return builder.build();
    }
}
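
The post stops at exposing the spout as a bean. For context, here is a minimal sketch of how that spout and the configured properties could be wired into a topology and submitted; the TopologyRunner class, the use of LocalCluster, the bolt placeholder, and the reference to the StormProperties class sketched above are assumptions for illustration, not part of the original code.

import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.kafka.spout.KafkaSpout;
import org.apache.storm.topology.TopologyBuilder;

public class TopologyRunner {

    // Assembles the topology from the Spring-managed spout and the bound properties,
    // then submits it to a local cluster for testing.
    public static void run(KafkaSpout<String, String> kafkaClientSpout,
                           KafkaClientSpoutBuilder spoutBuilder,
                           StormProperties props) throws Exception {
        TopologyBuilder builder = new TopologyBuilder();

        // Register the Kafka spout with the parallelism hint and task count from the YAML.
        builder.setSpout("kafkaClientSpout", kafkaClientSpout, spoutBuilder.getParallelismHint())
               .setNumTasks(props.getNumTasks());
        // Bolts that perform the actual ad-log computation would be attached here.

        Config config = new Config();
        config.setNumWorkers(props.getTopologyWorkers());
        config.setMaxSpoutPending(props.getTopologyMaxSpoutPending());
        config.setMessageTimeoutSecs(props.getTopologyMessageTimeoutSecs());

        new LocalCluster().submitTopology(props.getTopologyName(), config, builder.createTopology());
    }
}

In a Spring Boot application this would typically be invoked at startup, e.g. from a CommandLineRunner that receives the spout bean and the properties via injection.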



Reposted from www.cnblogs.com/flyyu1/p/11447761.html