多易教育KAFKA实战(2)-java生产者客户端API示例代码

案例一  入门实例

/**
 * Minimal Kafka producer example: sends 1000 string messages, one per
 * second, to the "hang-kafka" topic (fire-and-forget, no callback).
 */
public class Producter1 {

    public static void main(String[] args) throws InterruptedException {
        Properties p = new Properties();
        p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("bootstrap.servers", "lx01:9092,lx02:9092,lx03:9092");
        // FIX: the config key is "acks", not "acts" — the misspelled key was
        // silently ignored by Kafka, so the default ack setting was used.
        p.setProperty("acks", "1");
        p.setProperty("partitioner.class", "org.apache.kafka.clients.producer.internals.DefaultPartitioner");
        // try-with-resources guarantees the producer is flushed and closed
        // even if an exception escapes the send loop
        try (KafkaProducer<String, String> kp = new KafkaProducer<>(p)) {
            for (int i = 0; i < 1000; i++) {
                Thread.sleep(1000); // throttle: roughly one message per second

                // value-only record (no key) sent to the "hang-kafka" topic
                ProducerRecord<String, String> pr = new ProducerRecord<>("hang-kafka", "hang" + i);
                kp.send(pr);
                System.out.println("---------------hang" + i + "----------------");
            }
        }
    }
}

案例一(续) 入门程序(带回调)

回调函数会在 producer 收到 ack 时被异步调用。该方法有两个参数:RecordMetadata 和 Exception。如果 Exception 为 null,说明消息发送成功;如果 Exception 不为 null,说明消息发送失败。

注意:消息发送失败会自动重试,不需要我们在回调函数中手动重试。

/**
 * Kafka producer example with a send callback: sends 1000 string messages
 * to the "hang-kafka" topic and prints each record's metadata (topic,
 * offset, partition) once the broker acknowledges it.
 *
 * The callback is invoked asynchronously when the producer receives the
 * ack; a non-null Exception means the send ultimately failed (the producer
 * retries automatically before reporting failure).
 */
public class Producter2 {

    public static void main(String[] args) throws InterruptedException {
        Properties p = new Properties();
        p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("bootstrap.servers", "lx01:9092,lx02:9092,lx03:9092");
        // FIX: the config key is "acks", not "acts" — the misspelled key was
        // silently ignored by Kafka, so the default ack setting was used.
        p.setProperty("acks", "1");
        p.setProperty("partitioner.class", "org.apache.kafka.clients.producer.internals.DefaultPartitioner");
        // try-with-resources guarantees the producer is flushed and closed
        try (KafkaProducer<String, String> kp = new KafkaProducer<>(p)) {
            for (int i = 0; i < 1000; i++) {
                Thread.sleep(1000); // throttle: roughly one message per second

                // value-only record (no key) sent to the "hang-kafka" topic
                ProducerRecord<String, String> pr = new ProducerRecord<>("hang-kafka", "hang" + i);
                kp.send(pr, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                        if (e == null) {
                            String topic = recordMetadata.topic();
                            long offset = recordMetadata.offset();
                            int partition = recordMetadata.partition();
                            System.out.println(topic + "---offset:  " + offset + "---partittion: " + partition);
                        } else {
                            // FIX: the original swallowed the failure branch;
                            // report it so failed sends are not invisible
                            System.err.println("send failed for hang" + "-kafka record: " + e);
                            e.printStackTrace();
                        }
                    }
                });
            }
        }
    }
}

案例二 将mysql中的数据发送到kafka

/**
 * Reads every row of the MySQL table tb_product and sends each row to the
 * Kafka topic "tb_product" as a comma-separated string
 * "id,name,price,category".
 */
public class SqlData2KAFKA {
    public static void main(String[] args) throws Exception {
        Properties p = new Properties();
        p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("bootstrap.servers", "lx01:9092,lx02:9092,lx03:9092");
        // FIX: the config key is "acks", not "acts" — the misspelled key was
        // silently ignored by Kafka, so the default ack setting was used.
        p.setProperty("acks", "1");
        p.setProperty("partitioner.class", "org.apache.kafka.clients.producer.internals.DefaultPartitioner");

        // FIX: the original closed the producer and the JDBC resources only
        // on the happy path, leaking all of them if any exception was thrown
        // mid-loop. try-with-resources closes everything in reverse order of
        // acquisition, on success and on failure alike.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(p);
             Connection conn = DriverManager.getConnection("jdbc:mysql://localhost:3306/db_doit15", "root", "root");
             PreparedStatement ps = conn.prepareStatement("select * from tb_product ");
             ResultSet rs = ps.executeQuery()) {
            while (rs.next()) {
                int id = rs.getInt("id");
                String name = rs.getString("name");
                int price = rs.getInt("price");
                String category = rs.getString("category");
                // one Kafka message per row, fields joined by commas
                String value = id + "," + name + "," + price + "," + category;
                ProducerRecord<String, String> msg = new ProducerRecord<>("tb_product", value);
                producer.send(msg);
            }
        }
    }
}

案例三  将日志中的数据发送到kafka

/**
 * Reads a local log file line by line and sends each line as one message
 * to the Kafka topic "flow.log".
 */
public class Log2KafkKa {
    public static void main(String[] args) throws Exception {
        Properties p = new Properties();
        p.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        p.setProperty("bootstrap.servers", "lx01:9092,lx02:9092,lx03:9092");
        // FIX: the config key is "acks", not "acts" — the misspelled key was
        // silently ignored by Kafka, so the default ack setting was used.
        p.setProperty("acks", "1");
        p.setProperty("partitioner.class", "org.apache.kafka.clients.producer.internals.DefaultPartitioner");

        // FIX: the original closed the reader and producer only on the happy
        // path, leaking both if an exception was thrown mid-loop.
        // NOTE(review): FileReader uses the platform default charset here —
        // confirm the log file's encoding matches, or switch to an
        // InputStreamReader with an explicit charset.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(p);
             BufferedReader br = new BufferedReader(new FileReader("D:\\data\\flow\\input\\flow.log"))) {
            String line;
            // each line of the file becomes one Kafka message
            while ((line = br.readLine()) != null) {
                ProducerRecord<String, String> msg = new ProducerRecord<>("flow.log", line);
                producer.send(msg);
            }
        }
    }
}

猜你喜欢

转载自blog.csdn.net/qq_37933018/article/details/106625717