My KafkaProducer is able to use KafkaAvroSerializer to serialize objects to my topic. However, KafkaConsumer.poll() returns a deserialized GenericRecord instead of my serialized class.
Edit: ReflectData support got merged (see below).
To add to Chin Huang's answer, for minimal code and better performance, you should probably implement it this way:
import io.confluent.kafka.schemaregistry.client.rest.exceptions.RestClientException;
import io.confluent.kafka.serializers.AbstractKafkaAvroDeserializer;
import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
import org.apache.avro.Schema;
import org.apache.avro.io.BinaryDecoder;
import org.apache.avro.io.DecoderFactory;
import org.apache.avro.reflect.ReflectData;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Deserializer;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Map;

/**
 * Extends the Confluent deserializer to support ReflectData.
 *
 * @param <T> value type
 */
public abstract class SpecificKafkaAvroDeserializer<T> extends AbstractKafkaAvroDeserializer implements Deserializer<T> {

    private final Schema schema;
    private final Class<T> type;
    private final DecoderFactory decoderFactory = DecoderFactory.get();

    protected SpecificKafkaAvroDeserializer(Class<T> type, Map<String, ?> props) {
        this.type = type;
        // Derive the reader schema from the target class via reflection.
        this.schema = ReflectData.get().getSchema(type);
        this.configure(this.deserializerConfig(props));
    }

    public void configure(Map<String, ?> configs) {
        this.configure(new KafkaAvroDeserializerConfig(configs));
    }

    @Override
    protected T deserialize(
            boolean includeSchemaAndVersion,
            String topic,
            Boolean isKey,
            byte[] payload,
            Schema readerSchemaIgnore) throws SerializationException {
        if (payload == null) {
            return null;
        }

        int schemaId = -1;
        try {
            ByteBuffer buffer = ByteBuffer.wrap(payload);
            // Confluent wire format: magic byte, 4-byte schema id, then the Avro binary body.
            if (buffer.get() != MAGIC_BYTE) {
                throw new SerializationException("Unknown magic byte!");
            }
            schemaId = buffer.getInt();
            Schema writerSchema = schemaRegistry.getByID(schemaId);
            Schema readerSchema = ReflectData.get().getSchema(type);

            int start = buffer.position() + buffer.arrayOffset();
            int length = buffer.limit() - 1 - idSize;

            SpecificDatumReader<T> reader = new SpecificDatumReader<>(writerSchema, readerSchema);
            BinaryDecoder decoder = decoderFactory.binaryDecoder(buffer.array(), start, length, null);
            return reader.read(null, decoder);
        } catch (IOException e) {
            throw new SerializationException("Error deserializing Avro message for id " + schemaId, e);
        } catch (RestClientException e) {
            throw new SerializationException("Error retrieving Avro schema for id " + schemaId, e);
        }
    }
}
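Since the class above is abstract and has no zero-arg constructor, you still need a small concrete subclass per record type and you have to hand the instance to the consumer yourself rather than set it via value.deserializer. Here is a minimal sketch of how that could look; the User POJO, the UserDeserializer/buildConsumer names and the broker/registry addresses are my own placeholders, not part of the original post:

import io.confluent.kafka.serializers.KafkaAvroDeserializerConfig;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.util.HashMap;
import java.util.Map;

// Hypothetical concrete subclass for a "User" POJO (User is not from the original post).
public class UserDeserializer extends SpecificKafkaAvroDeserializer<User> {

    public UserDeserializer(Map<String, ?> props) {
        super(User.class, props);
    }

    // Bridge the Deserializer interface to the protected method implemented above.
    @Override
    public User deserialize(String topic, byte[] data) {
        return deserialize(false, topic, false, data, null);
    }

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // Configuration already happened in the constructor.
    }

    @Override
    public void close() {
    }

    // Example wiring: pass the deserializer instance directly to the KafkaConsumer
    // constructor, since this class cannot be instantiated from the config map.
    public static KafkaConsumer<String, User> buildConsumer() {
        Map<String, Object> config = new HashMap<>();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        config.put(ConsumerConfig.GROUP_ID_CONFIG, "user-consumer");
        config.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");

        return new KafkaConsumer<>(config, new StringDeserializer(), new UserDeserializer(config));
    }
}

The deserialize(String, byte[]) bridge just delegates to the protected five-argument method, so poll() hands you User instances instead of GenericRecord.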