Kafka Serialization



Custom serializers:

Kafka producers and consumers only ever handle byte arrays, so to send an object of a custom type such as Customer we need to provide a serializer for that type.

// First, create a simple class that represents a customer
public class Customer {
    private int customerID;
    private String customerName;

    public Customer(int ID, String name) {
        this.customerID = ID;
        this.customerName = name;
    }

    public int getID() {
        return customerID;
    }

    public String getName() {
        return customerName;
    }
}

Building the serializer:

import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serializer;

import java.nio.ByteBuffer;
import java.util.Map;

public class CustomerSerializer implements Serializer<Customer> {
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to configure
    }

    @Override
    public byte[] serialize(String topic, Customer data) {
        try {
            byte[] serializedName;
            int stringSize;
            if (data == null) {
                return null;
            } else {
                if (data.getName() != null) {
                    serializedName = data.getName().getBytes("UTF-8");
                    stringSize = serializedName.length;
                } else {
                    serializedName = new byte[0];
                    stringSize = 0;
                }
            }
            // layout: 4-byte customer ID + 4-byte name length + UTF-8 name bytes
            ByteBuffer buffer = ByteBuffer.allocate(4 + 4 + stringSize);
            buffer.putInt(data.getID());
            buffer.putInt(stringSize);
            buffer.put(serializedName);
            return buffer.array();
        } catch (Exception e) {
            throw new SerializationException("Error when serializing Customer to byte[]: " + e);
        }
    }

    @Override
    public void close() {
        // nothing to close
    }
}
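
To read these records back, the consumer side needs a matching deserializer that parses the same byte layout (4-byte ID, 4-byte name length, then the UTF-8 name bytes). Below is a minimal sketch; the class name CustomerDeserializer is chosen here for illustration and is not part of the original example:

import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;

import java.nio.ByteBuffer;
import java.util.Map;

public class CustomerDeserializer implements Deserializer<Customer> {
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to configure
    }

    @Override
    public Customer deserialize(String topic, byte[] data) {
        try {
            if (data == null) {
                return null;
            }
            if (data.length < 8) {
                throw new SerializationException("Data received by deserializer is shorter than expected");
            }
            ByteBuffer buffer = ByteBuffer.wrap(data);
            int id = buffer.getInt();        // 4-byte customer ID
            int nameSize = buffer.getInt();  // 4-byte length of the name
            byte[] nameBytes = new byte[nameSize];
            buffer.get(nameBytes);
            String name = new String(nameBytes, "UTF-8");
            return new Customer(id, name);
        } catch (Exception e) {
            throw new SerializationException("Error when deserializing byte[] to Customer: " + e);
        }
    }

    @Override
    public void close() {
        // nothing to close
    }
}

To use the custom serializer, set the producer's value.serializer property to the fully qualified class name of CustomerSerializer (and, on the consumer side, value.deserializer to the deserializer's class name).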

Using Avro with Kafka

Confluent's KafkaAvroSerializer serializes objects with Avro and stores their schemas in a Schema Registry, whose address is given by the schema.registry.url property.

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

public class AvroProducer {
    public static void main(String[] args) {
        // address of the Schema Registry (adjust to your environment)
        String schemaUrl = "http://localhost:8081";

        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
        props.put("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
        props.put("schema.registry.url", schemaUrl);

        String topic = "customerContacts";
        // Customer here is an Avro-generated class; CustomerGenerator is a helper that produces test data
        Producer<String, Customer> producer = new KafkaProducer<String, Customer>(props);
        while (true) {
            Customer customer = CustomerGenerator.getNext();
            // the customer ID (as a String) is used as the record key
            ProducerRecord<String, Customer> record =
                    new ProducerRecord<>(topic, String.valueOf(customer.getId()), customer);
            producer.send(record);
        }
    }
}
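
For completeness, these Avro records can be read back with Confluent's KafkaAvroDeserializer. The following is a minimal sketch, assuming the same Schema Registry address, that Customer is the Avro-generated class, and an illustrative group id and poll loop:

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class AvroConsumer {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "customer-readers");  // illustrative group id
        props.put("key.deserializer", "io.confluent.kafka.serializers.KafkaAvroDeserializer");
        props.put("value.deserializer", "io.confluent.kafka.serializers.KafkaAvroDeserializer");
        props.put("schema.registry.url", "http://localhost:8081");  // assumed registry address
        props.put("specific.avro.reader", "true");  // return Customer objects instead of GenericRecord

        KafkaConsumer<String, Customer> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("customerContacts"));

        while (true) {
            ConsumerRecords<String, Customer> records = consumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, Customer> record : records) {
                System.out.println("Received customer: " + record.value());
            }
        }
    }
}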
