# kafka/docker-compose.yml

services:
  kafka:
    image: apache/kafka:latest
    container_name: kafka-broker
    hostname: kafka-broker
    restart: unless-stopped
    ports:
      - "9092:9092"
    environment:
      # Single-broker KRaft config (broker + controller in one process)
      KAFKA_BROKER_ID: 1
      KAFKA_PROCESS_ROLES: broker,controller
      KAFKA_NODE_ID: 1
      KAFKA_CONTROLLER_QUORUM_VOTERS: 1@kafka-broker:29093
      KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,CONTROLLER:PLAINTEXT
      # Listeners:
      #   - PLAINTEXT: for other containers on the same Docker network
      #   - PLAINTEXT_HOST: for the host / outside clients
      KAFKA_LISTENERS: PLAINTEXT://kafka-broker:29092,CONTROLLER://kafka-broker:29093,PLAINTEXT_HOST://0.0.0.0:9092
      # PLAINTEXT_HOST is advertised as the LXC's LAN IP (10.10.2.18) so that
      # clients outside the LXC can reach the broker; use 'localhost' instead
      # if you only ever connect from this host.
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-broker:29092,PLAINTEXT_HOST://10.10.2.18:9092
      # Single-broker-safe replication settings
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      # Logs (inside container)
      KAFKA_LOG_DIRS: /tmp/kraft-combined-logs
      # Any fixed cluster id string is fine for local dev;
      # this is the same example used in Confluent's tutorial.
      CLUSTER_ID: MkU3OEVBNTcwNTJENDM2Qk
    networks:
      - kafka-network
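  # Quick smoke test once the broker is up (a sketch, assuming the Kafka
  # scripts live at /opt/kafka/bin inside the apache/kafka image; the topic
  # name is just an example):
  #   docker exec -it kafka-broker /opt/kafka/bin/kafka-topics.sh \
  #     --bootstrap-server kafka-broker:29092 --create --topic smoke-test
  #   docker exec -it kafka-broker /opt/kafka/bin/kafka-topics.sh \
  #     --bootstrap-server kafka-broker:29092 --list
  # From the host or the LAN, point clients at the advertised PLAINTEXT_HOST
  # address (10.10.2.18:9092) instead of kafka-broker:29092.
  # To use a fresh CLUSTER_ID instead of the fixed example value, you can
  # generate one inside the running container with:
  #   docker exec -it kafka-broker /opt/kafka/bin/kafka-storage.sh random-uuid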
  kafka-ui:
    image: provectuslabs/kafka-ui:latest
    container_name: kafka-ui
    restart: unless-stopped
    depends_on:
      - kafka
    ports:
      - "8080:8080"
    environment:
      KAFKA_CLUSTERS_0_NAME: "local"
      # Inside the Docker network Kafka listens on 29092
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: "kafka-broker:29092"
    networks:
      - kafka-network
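  # Once the stack is up, the Kafka UI should be reachable on port 8080 of the
  # host (e.g. http://10.10.2.18:8080, matching the LAN IP advertised above).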
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
    container_name: elasticsearch
    restart: unless-stopped
    environment:
      - discovery.type=single-node
      - cluster.name=${CLUSTER_NAME}
      - xpack.security.enabled=false
      - xpack.security.transport.ssl.enabled=false
      - ES_JAVA_OPTS=${ES_JAVA_OPTS}
      - xpack.license.self_generated.type=${LICENSE}
    volumes:
      - es-data:/usr/share/elasticsearch/data
    ports:
      - "${ES_PORT}:9200"
    networks:
      - elastic
  kibana:
    image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
    container_name: kibana
    restart: unless-stopped
    depends_on:
      - elasticsearch
    environment:
      - SERVER_NAME=kibana
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
    ports:
      - "${KIBANA_PORT}:5601"
    networks:
      - elastic
networks:
  elastic:
    driver: bridge
  kafka-network:
    driver: bridge
volumes:
  es-data:
    driver: local
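
# The Elasticsearch and Kibana services above read their settings from a .env
# file next to this compose file. A minimal sketch (all values below are
# assumptions; adjust them to your setup):
#
#   STACK_VERSION=8.14.1
#   CLUSTER_NAME=docker-cluster
#   LICENSE=basic
#   ES_JAVA_OPTS=-Xms1g -Xmx1g
#   ES_PORT=9200
#   KIBANA_PORT=5601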