Kafka custom serialization (producer)

The Kafka producer serializes objects into byte arrays before pushing them to the broker. The following is an example of custom serialization that serializes a User object.

First, add the jackson-mapper-asl dependency:

<dependency>
    <groupId>org.codehaus.jackson</groupId>
    <artifactId>jackson-mapper-asl</artifactId>
    <version>1.9.12</version>
</dependency>

Then define the entity class to be serialized:

package cn.org.fubin;

public class User {
    private String firstName;
    private String lastName;
    private int age;
    private String address;

    public User() {
    }

    public User(String firstName, String lastName, int age, String address) {
        this.firstName = firstName;
        this.lastName = lastName;
        this.age = age;
        this.address = address;
    }

    public String getFirstName() {
        return firstName;
    }

    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }

    public String getLastName() {
        return lastName;
    }

    public void setLastName(String lastName) {
        this.lastName = lastName;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    @Override
    public String toString() {
        return "User{" +
                "firstName='" + firstName + '\'' +
                ", lastName='" + lastName + '\'' +
                ", age=" + age +
                ", address='" + address + '\'' +
                '}';
    }
}
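As a quick sanity check (not part of the original post), the Jackson mapper introduced above turns this entity into a JSON string, which is exactly what the serializer below will convert to bytes. The class name UserJsonCheck is only illustrative:

package cn.org.fubin;

import org.codehaus.jackson.map.ObjectMapper;

public class UserJsonCheck {
    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        User user = new User("a", "b", 23, "china");
        // Prints something like {"firstName":"a","lastName":"b","age":23,"address":"china"}
        // (property order may vary)
        System.out.println(mapper.writeValueAsString(user));
    }
}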

Next, create a serializer class that implements the Serializer interface provided by the Kafka client:

package cn.org.fubin;

import org.apache.kafka.common.serialization.Serializer;
import org.codehaus.jackson.map.ObjectMapper;

import java.io.IOException;
import java.util.Map;

public class UserSerializer implements Serializer<User> {

    private ObjectMapper objectMapper;

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        objectMapper = new ObjectMapper();
    }

    @Override
    public byte[] serialize(String topic, User data) {
        byte[] ret = null;
        try {
            // Convert the object to its JSON representation and return the UTF-8 bytes.
            ret = objectMapper.writeValueAsString(data).getBytes("utf-8");
        } catch (IOException e) {
            System.out.println("serialization failed");
            e.printStackTrace();
        }
        return ret;
    }

    @Override
    public void close() {

    }
}
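The post only covers the producer side. For completeness, here is a minimal sketch of the matching consumer-side deserializer, assuming the same Jackson 1.9 mapper; the class name UserDeserializer is not from the original post:

package cn.org.fubin;

import org.apache.kafka.common.serialization.Deserializer;
import org.codehaus.jackson.map.ObjectMapper;

import java.io.IOException;
import java.util.Map;

public class UserDeserializer implements Deserializer<User> {

    private ObjectMapper objectMapper;

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        objectMapper = new ObjectMapper();
    }

    @Override
    public User deserialize(String topic, byte[] data) {
        if (data == null) {
            return null;
        }
        try {
            // Parse the JSON bytes produced by UserSerializer back into a User object.
            return objectMapper.readValue(data, User.class);
        } catch (IOException e) {
            throw new RuntimeException("deserialization failed", e);
        }
    }

    @Override
    public void close() {

    }
}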

Kafka already ships serializers for common types by default (for example StringSerializer, IntegerSerializer, and ByteArraySerializer in org.apache.kafka.common.serialization). Finally, in the main class, point value.serializer at the custom serializer and send a User entity:

package cn.org.fubin;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.errors.RetriableException;

import java.util.Properties;
import java.util.concurrent.ExecutionException;

/**
 * Retriable exceptions:
 * 1. A partition replica is temporarily unavailable
 * 2. The controller is currently unavailable
 * 3. Transient network failures
 *
 * These heal on their own, so choose the retry count accordingly.
 *
 * Non-retriable exceptions:
 * 1. The message is too large
 * 2. Serialization failure
 * 3. Other exception types
 */

public class KafkaProducerDemo {

    public static void main(String[] args) throws ExecutionException, InterruptedException {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "localhost:9092,localhost:9093,localhost:9094");
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        properties.put("value.serializer", "cn.org.fubin.UserSerializer");

        properties.put("acks", "-1");
        System.out.println(ProducerConfig.ACKS_CONFIG);
        properties.put("retries", "3");
        properties.put("batch.size", 1048576);
        properties.put("linger.ms", 10);
        properties.put("buffer.memory", "33554432");
        System.out.println(ProducerConfig.BUFFER_MEMORY_CONFIG);
        properties.put(ProducerConfig.COMPRESSION_TYPE_CONFIG,"lz4");
        properties.put("max.block.ms", "3000");

        String topic = "test-topic";
        Producer<String,User> producer = new KafkaProducer<String, User>(properties);

        User user = new User("a","b",23,"china");
        ProducerRecord<String, User> record = new ProducerRecord<String, User>(topic, user);
        producer.send(record).get();
        producer.close();

    }

}

 

Running the main class sends a single serialized User entity to test-topic, with the custom serializer declared through the value.serializer property.
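The comment block in the demo distinguishes retriable from non-retriable exceptions. Below is a hedged sketch of an asynchronous send whose callback makes that distinction explicit; the class name KafkaProducerCallbackDemo and the reduced property set are illustrative, not from the original post:

package cn.org.fubin;

import org.apache.kafka.clients.producer.*;
import org.apache.kafka.common.errors.RetriableException;

import java.util.Properties;

public class KafkaProducerCallbackDemo {

    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "localhost:9092,localhost:9093,localhost:9094");
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "cn.org.fubin.UserSerializer");
        properties.put("retries", "3");

        Producer<String, User> producer = new KafkaProducer<String, User>(properties);
        ProducerRecord<String, User> record =
                new ProducerRecord<String, User>("test-topic", new User("a", "b", 23, "china"));

        producer.send(record, new Callback() {
            public void onCompletion(RecordMetadata metadata, Exception exception) {
                if (exception == null) {
                    System.out.println("sent to partition " + metadata.partition()
                            + ", offset " + metadata.offset());
                } else if (exception instanceof RetriableException) {
                    // Transient failure (e.g. leader election in progress);
                    // the retries setting lets the producer recover on its own.
                    System.out.println("retriable failure: " + exception.getMessage());
                } else {
                    // e.g. record too large or serialization error; retrying will not help.
                    System.out.println("non-retriable failure: " + exception.getMessage());
                }
            }
        });
        producer.close();
    }
}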

Origin: www.cnblogs.com/fubinhnust/p/11967891.html