I have this in my docker-compose.yml:
version: '3'
services:
  app:
    build:
      context: .
      dockerfile: app/Dockerfile
    ports:
      - 8000:8000
    environment:
      DB_NAME: postgres
      DB_USER: postgres
      DB_PASS: postgres
      DB_HOST: db
      DB_PORT: 5432
      SECRET_KEY: my_secret_key
      DEBUG: 'True'
    depends_on:
      - db
  db:
    image: postgres:11.1-alpine
    environment:
      DB_NAME: postgres
      DB_USER: postgres
      DB_PASS: postgres
      DB_PORT: 5432
    ports:
      - 5432:5432
    volumes:
      - db:/var/lib/postgresql/data
    restart: always
  zookeeper:
    image: wurstmeister/zookeeper:latest
    ports:
      - "2181:2181"
  kafka:
    image: wurstmeister/kafka:latest
    ports:
      - "9092:9092"
    expose:
      - "9093"
    environment:
      KAFKA_ADVERTISED_HOST_NAME: kafka
      KAFKA_ADVERTISED_LISTENERS: INSIDE://kafka:9093,OUTSIDE://localhost:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INSIDE:PLAINTEXT,OUTSIDE:PLAINTEXT
      KAFKA_LISTENERS: INSIDE://0.0.0.0:9093,OUTSIDE://0.0.0.0:9092
      KAFKA_INTER_BROKER_LISTENER_NAME: INSIDE
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_CREATE_TOPICS: "msg_topic:1:1"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
  listener:
    build:
      context: .
      dockerfile: listener/Dockerfile
    depends_on:
      - kafka
      - zookeeper
      - app

volumes:
  db:
This is the producer:
import pickle

from django.http import HttpResponse
from kafka import KafkaProducer


def prod(msg):
    producer = KafkaProducer(bootstrap_servers='kafka:9092')
    data = {
        'msg': msg.text
    }
    # pickle the payload so it can be sent as raw bytes
    serialized_data = pickle.dumps(data, pickle.HIGHEST_PROTOCOL)
    producer.send('msg_topic', serialized_data)
    print('message was sent')
    return HttpResponse(200)
And the consumer:
import pickle

from kafka import KafkaConsumer


def cons():
    consumer = KafkaConsumer('msg_topic',
                             bootstrap_servers=['kafka:9092'])
    for message in consumer:
        deserialized_data = pickle.loads(message.value)
        result = deserialized_data['msg']
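The listener container basically just calls cons() when it starts; its entrypoint looks roughly like this (trimmed down, the module name is only for illustration):

# listener entrypoint (simplified; the 'consumer' module name is illustrative)
from consumer import cons

if __name__ == '__main__':
    cons()  # blocks and keeps polling msg_topic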
All the containers are running, but when I send a message from the producer, the consumer doesn't receive it. What's the problem? I've spent a lot of time on this; can anyone help, please? Could this be a port issue? To be honest, I don't understand which ports I should use.