Spring Kafka The class is not in the trusted packages

北荒  2020-12-31 03:20

In my Spring Boot/Kafka application, before a library update I used the class org.telegram.telegrambots.api.objects.Update to post messages to a Kafka topic. After the update the consumer fails with the error in the title: the class is not in the trusted packages.

6 Answers

  •  慢半拍i  2020-12-31 03:36

    There are two key points that should be mentioned:

    1. The Producer and Consumer are two separate projects.
    2. The message value being sent is a custom object type rather than a primitive type.

    The problem is that the producer's message class is not available on the consumer side, because the two sides live in separate projects. Spring Kafka's JsonSerializer records the producer's fully qualified class name in a message header, and the consumer's JsonDeserializer refuses to deserialize into any class whose package it has not been told to trust; since the two projects use different packages, this is what produces the "not in the trusted packages" error.

    To overcome this issue, please follow the steps below in the Spring Boot Producer and Consumer applications.

    ----Producer App -------------

    ** Producer Configuration Class **

    import com.kafka.producer.models.Container;
    import org.apache.kafka.clients.producer.ProducerConfig;
    import org.apache.kafka.common.serialization.StringSerializer;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.kafka.core.DefaultKafkaProducerFactory;
    import org.springframework.kafka.core.KafkaTemplate;
    import org.springframework.kafka.core.ProducerFactory;
    import org.springframework.kafka.support.serializer.JsonSerializer;
    import java.util.HashMap;
    import java.util.Map;

    @Configuration
    public class KafkaProducerConfig {

        @Bean
        public ProducerFactory<String, Container> producerFactory() {
            Map<String, Object> config = new HashMap<>();

            config.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            config.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
            // JsonSerializer turns the Container value into a JSON message
            config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);

            return new DefaultKafkaProducerFactory<>(config);
        }

        @Bean
        public KafkaTemplate<String, Container> kafkaTemplate() {
            return new KafkaTemplate<>(producerFactory());
        }
    }
    

    Note: Container is the custom object that will be posted to the Kafka topic.
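
    The answer never shows the Container class itself. A minimal sketch of what it could look like is below; the messageTypes field is an assumption taken from the getMessageTypes() call in the consumer further down, and any plain Jackson-serializable POJO with a no-arg constructor will do.

    // Hypothetical sketch of com.kafka.producer.models.Container (field name assumed).
    public class Container {

        private String messageTypes;

        public Container() {
        }

        public String getMessageTypes() {
            return messageTypes;
        }

        public void setMessageTypes(String messageTypes) {
            this.messageTypes = messageTypes;
        }

        @Override
        public String toString() {
            return "Container{messageTypes='" + messageTypes + "'}";
        }
    }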


    ** Producer Class **

    import com.kafka.producer.models.Container;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.kafka.core.KafkaTemplate;
    import org.springframework.kafka.support.KafkaHeaders;
    import org.springframework.messaging.Message;
    import org.springframework.messaging.support.MessageBuilder;
    import org.springframework.stereotype.Service;

    @Service
    public class Producer {

        private static final Logger LOGGER = LoggerFactory.getLogger(Producer.class);
        private static final String TOPIC = "final-topic";

        @Autowired
        private KafkaTemplate<String, Container> kafkaTemplate;

        public void sendUserMessage(Container msg) {
            LOGGER.info("===== Producing message in JSON ===== {}", msg);
            // The target topic is set as a message header; the payload is serialized by JsonSerializer
            Message<Container> message = MessageBuilder
                    .withPayload(msg)
                    .setHeader(KafkaHeaders.TOPIC, TOPIC)
                    .build();
            this.kafkaTemplate.send(message);
        }
    }
    

    ** Producer Controller **

    import com.kafka.producer.models.Container;
    import com.kafka.producer.services.Producer;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.web.bind.annotation.*;

    @RestController
    @RequestMapping("/message")
    public class MessageController {

        @Autowired
        private Producer producer;

        @PostMapping(value = "/publish")
        public String sendMessageToKafkaTopic(@RequestBody Container containerMsg) {
            this.producer.sendUserMessage(containerMsg);
            return "Successfully Published !!";
        }
    }
    

    Note: A message of type Container will be published to the Kafka topic named final-topic as a JSON message.
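
    By default the JsonSerializer also writes the payload's fully qualified class name into a __TypeId__ record header, and that header is what the consumer later checks against its trusted packages. If you prefer not to ship type information at all, the serializer can be told to omit the header. This is a sketch of an alternative, not part of the original answer, and assumes spring-kafka 2.1+ where the JsonSerializer.ADD_TYPE_INFO_HEADERS property exists; the consumer must then rely purely on its own configured target type.

    // Optional alternative: add this to the config map in producerFactory() above
    // so that JsonSerializer does not write the __TypeId__ header.
    config.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
    config.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);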

    ===============================================================================

    -- Consumer App --

    ** Configuration Class **

    import com.kafka.consumer.models.Container;
    import org.apache.kafka.clients.consumer.ConsumerConfig;
    import org.apache.kafka.common.serialization.StringDeserializer;
    import org.springframework.context.annotation.Bean;
    import org.springframework.context.annotation.Configuration;
    import org.springframework.kafka.annotation.EnableKafka;
    import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
    import org.springframework.kafka.core.ConsumerFactory;
    import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
    import org.springframework.kafka.support.serializer.JsonDeserializer;
    import java.util.HashMap;
    import java.util.Map;

    @Configuration
    @EnableKafka
    public class KafkaConsumerOneConfig {

        @Bean
        public ConsumerFactory<String, Container> consumerFactory() {
            // Deserialize the JSON value into the consumer's own Container class
            JsonDeserializer<Container> deserializer = new JsonDeserializer<>(Container.class);
            deserializer.setRemoveTypeHeaders(false);
            // Trust every package, so the type header written by the producer is accepted
            deserializer.addTrustedPackages("*");
            deserializer.setUseTypeMapperForKey(true);

            Map<String, Object> config = new HashMap<>();

            config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
            config.put(ConsumerConfig.GROUP_ID_CONFIG, "group_one");
            config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
            config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
            config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);

            // The deserializer instance is passed to the factory directly, so no
            // VALUE_DESERIALIZER_CLASS_CONFIG entry is needed in the config map
            return new DefaultKafkaConsumerFactory<>(config, new StringDeserializer(), deserializer);
        }

        @Bean
        public ConcurrentKafkaListenerContainerFactory<String, Container> kafkaListenerContainerFactory() {
            ConcurrentKafkaListenerContainerFactory<String, Container> factory =
                    new ConcurrentKafkaListenerContainerFactory<>();
            factory.setConsumerFactory(consumerFactory());
            return factory;
        }
    }
    

    Note: Here you can see that instead of relying on the default JsonDeserializer(), we configure our own JsonDeserializer instance (with the target type Container and trusted packages) to consume Container-typed JSON messages from final-topic (the topic name).
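
    As a side note, newer spring-kafka versions also allow the same setup to be driven purely by consumer properties instead of hand-building the JsonDeserializer. The sketch below is not part of the original answer and assumes spring-kafka 2.3+, where the JsonDeserializer.TRUSTED_PACKAGES, VALUE_DEFAULT_TYPE and USE_TYPE_INFO_HEADERS constants are available; it would replace the body of consumerFactory() above.

    // Alternative: configure JsonDeserializer entirely through consumer properties.
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    config.put(ConsumerConfig.GROUP_ID_CONFIG, "group_one");
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonDeserializer.class);
    // Trust only the package that holds the consumer-side Container
    config.put(JsonDeserializer.TRUSTED_PACKAGES, "com.kafka.consumer.models");
    // Always deserialize into the consumer-side Container...
    config.put(JsonDeserializer.VALUE_DEFAULT_TYPE, Container.class.getName());
    // ...and ignore the producer's __TypeId__ header entirely
    config.put(JsonDeserializer.USE_TYPE_INFO_HEADERS, false);
    return new DefaultKafkaConsumerFactory<>(config);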


    ** Consumer Service **

    import com.kafka.consumer.models.Container;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.springframework.kafka.annotation.KafkaListener;
    import org.springframework.messaging.MessageHeaders;
    import org.springframework.messaging.handler.annotation.Headers;
    import org.springframework.messaging.handler.annotation.Payload;
    import org.springframework.stereotype.Service;
    import java.io.IOException;

    @Service
    public class ConsumerOne {

        private static final Logger LOGGER = LoggerFactory.getLogger(ConsumerOne.class);

        @KafkaListener(topics = "final-topic", groupId = "group_one", containerFactory = "kafkaListenerContainerFactory")
        public void consumeUserMessage(@Payload Container msg, @Headers MessageHeaders headers) throws IOException {
            // The payload is already deserialized into the consumer-side Container class
            LOGGER.info("received data in Consumer One = {}", msg.getMessageTypes());
        }
    }
    
