Kafka Avro serializer and deserializer

Date: 2018-08-17 12:20:49

Tags: java apache-kafka avro

I want to implement a generic Kafka serializer/deserializer based on Avro. It should not rely on generated classes; fields need to be accessed generically, via something like GenericRecord.get("myValue").

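To make the requirement concrete, this is the kind of access pattern I have in mind; the schema and the field name below are made up purely for illustration:

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericRecord;

public class GenericRecordExample {

  public static void main(String[] args) {
    // Hypothetical schema with a single double field called "myValue".
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"Example\","
            + "\"fields\":[{\"name\":\"myValue\",\"type\":\"double\"}]}");

    GenericRecord record = new GenericData.Record(schema);
    record.put("myValue", 100.25);

    // Fields are read by name, without any generated classes.
    double value = (double) record.get("myValue");
    System.out.println(value);
  }
}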
Here is the code of my serializer:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Map;

import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.DatumWriter;
import org.apache.avro.io.EncoderFactory;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serializer;

public class AvroSerializer implements Serializer<GenericRecord> {

  @Override
  public void configure(Map<String, ?> configs, boolean isKey) {

  }

  @Override
  public byte[] serialize(String topic, GenericRecord data) {
    try {
      byte[] result = null;

      if (data != null) {

        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        BinaryEncoder binaryEncoder =
            EncoderFactory.get().binaryEncoder(byteArrayOutputStream, null);

        // Write the record using its own schema; only the raw binary payload
        // (without the schema) ends up in the Kafka message.
        DatumWriter<GenericRecord> datumWriter =
            new GenericDatumWriter<GenericRecord>(data.getSchema());
        datumWriter.write(data, binaryEncoder);

        binaryEncoder.flush();
        byteArrayOutputStream.close();

        result = byteArrayOutputStream.toByteArray();
      }
      return result;
    } catch (IOException ex) {
      throw new SerializationException(
          "Can't serialize data='" + data + "' for topic='" + topic + "'", ex);
    }
  }

  @Override
  public void close() {

  }

}

And here is the deserializer:

import java.util.Arrays;
import java.util.Map;

import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;

public class AvroDeserializer implements Deserializer<GenericRecord> {

  @Override
  public void close() {

  }

  @Override
  public void configure(Map<String, ?> configs, boolean isKey) {

  }

  @Override
  public GenericRecord deserialize(String topic, byte[] data) {
    try {
      GenericRecord result = null;

      if (data != null) {
        DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
        Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
        result = reader.read(null, decoder);
      }
      return result;
    } catch (Exception ex) {
      throw new SerializationException(
          "Can't deserialize data '" + Arrays.toString(data) + "' from topic '" + topic + "'", ex);
    }
  }

}
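
For completeness, this is roughly how the two classes are plugged into the clients; the broker address and the group id below are placeholders:

import java.util.Properties;

import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

public class ClientSetup {

  public static void main(String[] args) {
    // Producer side: the custom serializer is registered for the message value.
    Properties producerProps = new Properties();
    producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
    producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, AvroSerializer.class.getName());
    KafkaProducer<String, GenericRecord> producer = new KafkaProducer<>(producerProps);

    // Consumer side: the custom deserializer is registered for the message value.
    Properties consumerProps = new Properties();
    consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "my-group");
    consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
    consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, AvroDeserializer.class.getName());
    KafkaConsumer<String, GenericRecord> consumer = new KafkaConsumer<>(consumerProps);

    producer.close();
    consumer.close();
  }
}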

Unfortunately, when a Kafka message is deserialized I get a SerializationException:

Caused by: org.apache.kafka.common.errors.SerializationException: Error deserializing key/value for partition my.topic-0 at offset 7
Caused by: org.apache.kafka.common.errors.SerializationException: Can't deserialize data '[72, -31, 122, 20, -82, 7, 89, 64]' from topic 'my.topic'
Caused by: java.lang.NullPointerException: writer cannot be null!
at org.apache.avro.io.ResolvingDecoder.resolve(ResolvingDecoder.java:80)
at org.apache.avro.io.ResolvingDecoder.<init>(ResolvingDecoder.java:49)
at org.apache.avro.io.DecoderFactory.resolvingDecoder(DecoderFactory.java:307)
at org.apache.avro.generic.GenericDatumReader.getResolver(GenericDatumReader.java:128)
at org.apache.avro.generic.GenericDatumReader.read(GenericDatumReader.java:143)
at com.mydomain.serialization.AvroDeserializer.deserialize(AvroDeserializer.java:34)
at com.mydomain.serialization.AvroDeserializer.deserialize(AvroDeserializer.java:1) 
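
The trace fails in ResolvingDecoder.resolve with "writer cannot be null!", which suggests the GenericDatumReader has no writer schema to resolve against: in the deserializer above it is created with the no-argument constructor, so it never learns which schema the bytes were written with. As a minimal sketch (the class name and schema-loading approach are only illustrative, not a drop-in fix), decoding with an explicitly supplied writer schema, assuming the consumer has access to the same schema the producer used, would look like this:

import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;

public class SchemaAwareDecoding {

  // Decodes a payload written with the given schema. The schema must match what the
  // producer used; here it is assumed to be shared with the consumer out of band
  // (e.g. both sides load the same .avsc file).
  public static GenericRecord decode(byte[] data, Schema writerSchema) throws IOException {
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>(writerSchema);
    Decoder decoder = DecoderFactory.get().binaryDecoder(data, null);
    return reader.read(null, decoder);
  }
}

Confluent's KafkaAvroSerializer avoids sharing the schema out of band by prefixing each message with a schema-registry id; with a hand-rolled serializer like the one above, the schema has to be made available to the consumer some other way.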
