问题
I'm trying to connect a Bitnami Certified Kafka AMI to an Elastic Beanstalk Node.js environment using kafka-node. How can I do that?
After installing apache Kafka locally and testing it with Kafka-node successfully, I wanted to test my app with AWS kafka server.
I configured my AWS Bitnami Certified Kafka AMI listeners to match my Public DNS (IPv4) and exposed the 9092 and 2181 ports in inbound rules like this:
Type protocol port source
Custom TCP Rule TCP 9092 0.0.0.0/0
Custom TCP Rule TCP 2181 0.0.0.0/0
#server.properties
listeners=SASL_PLAINTEXT://<Public DNS (IPv4) from AWS>:9092
# EXAMPLE:
# listeners = PLAINTEXT://your.host.name:9092
#listeners=PLAINTEXT://<Public DNS (IPv4) from AWS>:9092
# Hostname and port the broker will advertise to producers and consumers.
# If not set it uses the value for "listeners" if configured. Otherwise, it
# will use the value returned from
# java.net.InetAddress.getCanonicalHostName().
advertised.listeners=SASL_PLAINTEXT://<Public DNS (IPv4) from AWS>:9092
# root directory for all kafka znodes.
zookeeper.connect=<Public DNS (IPv4) from AWS>:2181
I'm setting my producer using kafka-node like this:
// Producer setup (kafka-node), connecting directly to the broker on 9092.
// FIX: the kafkaHost value must be a quoted string such as 'x.x.x.x:9092' —
// the original snippet left the placeholder unquoted, which is a syntax error.
const Producer = kafka.Producer;
const client = new kafka.KafkaClient({ kafkaHost: '<kafka-public-ip>:9092' });
const producer = new Producer(client);
producer.on('ready', function () {
  console.log('Producer is ready');
});
producer.on('error', function (err) {
  console.log('Producer is in error state');
  console.log(err);
});
kafka-node is throwing a timeout error Error: Unable to find available brokers to try
I have tested the default port 22 with telnet open <kafka-instance-public-ip> 22
and it worked, but port 9092 is not working.
Bitnami Kafka AMI questions in summary:
1- How to configure Bitnami Kafka AMI with AWS to be accessed remotely
回答1:
Here is how I have this set up: the following two files can run on their own and only require express and kafka-node@3.0.1.
// consumer.js — kafka-node consumer that connects through ZooKeeper (port 2181).
// Requires kafka-node@3.0.1; later versions removed the ZooKeeper-based kafka.Client.
const kafka = require('kafka-node');

const Consumer = kafka.Consumer;
// kafka.Client takes the ZooKeeper connection string, NOT the broker address.
const client = new kafka.Client('<IP of kafka server>:2181');
// FIX: the original assigned `consumer` without const/let/var, creating an
// implicit global (a ReferenceError in strict mode / ES modules).
const consumer = new Consumer(client, [{ topic: '<>' }]);

console.log('listening');

consumer.on('message', function (message) {
  console.log(message);
});
consumer.on('error', function (err) {
  console.log('Error:', err);
});
consumer.on('offsetOutOfRange', function (err) {
  console.log('offsetOutOfRange:', err);
});
This connects through ZooKeeper, so I think you need version 3.0.1 of kafka-node; when you install it, the command would be
npm install --save kafka-node@3.0.1
To connect straight to the broker, you may have to figure that out on your own.
// producer.js — small express HTTP façade over a kafka-node producer.
// POST /kafkaproducer with { topic, message } publishes the message to Kafka.
const express = require('express');
const kafka = require('kafka-node');
const app = express();
const bodyParser = require('body-parser');
app.use(bodyParser.json()); // to support JSON-encoded bodies
app.use(bodyParser.urlencoded({ extended: true }));

const { Producer } = kafka;
// kafka.Client connects via ZooKeeper (requires kafka-node@3.0.1).
const client = new kafka.Client('<IP of kafka server>:2181');
const producer = new Producer(client);

producer.on('ready', () => {
  console.log('Producer is ready');
});
producer.on('error', (err) => {
  console.log('Producer is in error state');
  console.log(err);
});

// Publish req.body.message to req.body.topic on partition 0.
app.post('/kafkaproducer', (req, res) => {
  const sentMessage = JSON.stringify(req.body.message);
  const payloads = [
    { topic: req.body.topic, messages: sentMessage, partition: 0 },
  ];
  producer.send(payloads, (err, data) => {
    // FIX: respond exactly once, error-first — the original used two
    // independent ifs and returned errors with HTTP 200.
    if (err) {
      res.status(500).send(err);
    } else {
      res.json(data);
    }
  });
});

app.get('/', (req, res) => {
  res.json({ greeting: 'Kafka Producer' });
});

app.listen(5001, () => {
  console.log('Kafka producer running at 5001');
});
You can use Postman to send a POST HTTP request to http://localhost:5001/kafkaproducer in the following format:
{
topic: '<TOPIC YOU WANT>',
messages: '<Can be any format you want even a json but i would advise just
testing with a basic string at first>'
}
The consumer will then pick up the message. Make sure the topic has already been created on the Kafka server and that your consumer is subscribed to the correct topic.
On a side note, if you went with an EC2 instance, you could combine them:
// Combined producer + consumer in a single express app (for one EC2 instance).
const express = require('express');
const kafka = require('kafka-node');
const app = express();
const bodyParser = require('body-parser');
app.use(bodyParser.json()); // to support JSON-encoded bodies
app.use(bodyParser.urlencoded({ extended: true }));

const { Producer, Consumer } = kafka;
// One shared ZooKeeper-based client serves both the producer and the consumer.
const client = new kafka.Client('13.56.240.35:2181');
const producer = new Producer(client);
// FIX: the original assigned `consumer` without const/let/var — an implicit
// global (a ReferenceError in strict mode / ES modules).
const consumer = new Consumer(client, [{ topic: 'memes-to-mturk' }]);

producer.on('ready', () => {
  console.log('Producer is ready');
});
producer.on('error', (err) => {
  console.log('Producer is in error state');
  console.log(err);
});

consumer.on('message', (message) => {
  console.log(message);
});
consumer.on('error', (err) => {
  console.log('Error:', err);
});

app.get('/', (req, res) => {
  res.json({ greeting: 'Kafka Producer' });
});

// Publish req.body.message to req.body.topic on partition 0.
app.post('/kafkaproducer', (req, res) => {
  const sentMessage = JSON.stringify(req.body.message);
  console.log(sentMessage);
  const payloads = [
    { topic: req.body.topic, messages: sentMessage, partition: 0 },
  ];
  producer.send(payloads, (err, data) => {
    // FIX: respond exactly once, error-first, with a proper error status.
    if (err) {
      res.status(500).send(err);
    } else {
      res.json(data);
    }
  });
});

app.listen(5002, () => {
  // FIX: the original logged port 5001 while actually listening on 5002.
  console.log('Kafka producer running at 5002');
});
来源:https://stackoverflow.com/questions/56650664/how-to-connect-aws-bitnami-certified-kafka-ami-with-elastic-beanstalk-nodejs-env