# kafka/docker-compose.yml
#
# Single-node Kafka (KRaft) + Kafka UI, plus Elasticsearch + Kibana.
# Elastic stack settings (STACK_VERSION, CLUSTER_NAME, ES_PORT, KIBANA_PORT,
# ES_JAVA_OPTS, LICENSE) are expected from an accompanying .env file.

services:
  kafka:
    image: apache/kafka:latest
    container_name: kafka-broker
    hostname: kafka-broker
    restart: unless-stopped
    ports:
      # Host-facing listener (PLAINTEXT_HOST); quoted to avoid YAML
      # sexagesimal/number parsing of port mappings.
      - "9092:9092"
    environment:
      # Single-broker KRaft config (broker + controller in one process)
      KAFKA_NODE_ID: "1"
      # Legacy ZooKeeper-era alias of node.id; redundant in KRaft mode,
      # kept only so older image variants behave identically.
      KAFKA_BROKER_ID: "1"
      KAFKA_PROCESS_ROLES: "broker,controller"
      KAFKA_CONTROLLER_QUORUM_VOTERS: "1@kafka-broker:29093"
      KAFKA_CONTROLLER_LISTENER_NAMES: "CONTROLLER"
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT,CONTROLLER:PLAINTEXT"
      # Listeners:
      # - PLAINTEXT: for other containers in the same network
      # - PLAINTEXT_HOST: for host / outside
      KAFKA_LISTENERS: "PLAINTEXT://kafka-broker:29092,CONTROLLER://kafka-broker:29093,PLAINTEXT_HOST://0.0.0.0:9092"
      # If you connect from OUTSIDE the LXC over LAN,
      # change 'localhost' below to the LXC IP (e.g. 192.168.1.50)
      KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://kafka-broker:29092,PLAINTEXT_HOST://localhost:9092"
      # Single-broker-safe replication settings
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1"
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: "1"
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: "1"
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: "0"
      # Logs (inside container)
      KAFKA_LOG_DIRS: /tmp/kraft-combined-logs
      # Any fixed cluster id string is fine for local dev;
      # this is the same example used in Confluent's tutorial.
      CLUSTER_ID: "MkU3OEVBNTcwNTJENDM2Qk"
    networks:
      - kafka-network

  kafka-ui:
    image: provectuslabs/kafka-ui:latest
    container_name: kafka-ui
    restart: unless-stopped
    depends_on:
      - kafka
    ports:
      - "8080:8080"
    environment:
      KAFKA_CLUSTERS_0_NAME: "local"
      # Inside the Docker network Kafka listens on 29092
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: "kafka-broker:29092"
    networks:
      - kafka-network

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION}
    container_name: elasticsearch
    restart: unless-stopped
    environment:
      - discovery.type=single-node
      - cluster.name=${CLUSTER_NAME}
      # Security disabled for dev/test only — do not use in production
      - xpack.security.enabled=false
      - xpack.security.transport.ssl.enabled=false
      # Heap size
      - ES_JAVA_OPTS=${ES_JAVA_OPTS}
      # License mode
      - xpack.license.self_generated.type=${LICENSE}
    ulimits:
      # Allow the JVM to lock memory (bootstrap.memory_lock support)
      memlock:
        soft: -1
        hard: -1
      nofile:
        soft: 65536
        hard: 65536
    volumes:
      - es-data:/usr/share/elasticsearch/data
    ports:
      - "${ES_PORT}:9200"
    networks:
      - elastic

  kibana:
    image: docker.elastic.co/kibana/kibana:${STACK_VERSION}
    container_name: kibana
    restart: unless-stopped
    depends_on:
      - elasticsearch
    environment:
      - SERVER_NAME=kibana
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
    ports:
      - "${KIBANA_PORT}:5601"
    networks:
      - elastic

# Named volume so Elasticsearch data survives container recreation.
# (It was referenced by the elasticsearch service but never declared,
# which makes `docker compose up` fail with "undefined volume".)
volumes:
  es-data:

networks:
  kafka-network:
    driver: bridge
  # Referenced by elasticsearch/kibana but previously never declared.
  elastic:
    driver: bridge