Kafka setup with docker-compose
Posted: 2017-08-20 02:41:57

【Question】: Hi, I am currently setting up Kafka with Docker. I have managed to get Zookeeper and Kafka up and running with the published Confluent images; see the following docker-compose file:
version: '2'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:3.2.0
    container_name: zookeeper
    hostname: zookeeper
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
    restart: always
  kafka:
    image: confluentinc/cp-kafka:3.2.0
    hostname: kafka
    container_name: kafka
    depends_on:
      - zookeeper
    ports:
      - '9092:9092'
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://192.168.99.100:9092
      LISTENERS: PLAINTEXT://0.0.0.0:9092
    restart: always
  kafka-rest:
    image: confluentinc/cp-kafka-rest:3.2.0
    container_name: kafka-rest
    depends_on:
      - kafka
    ports:
      - '8082:8082'
    environment:
      KAFKA_REST_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_REST_LISTENERS: http://kafka-rest:8082
      KAFKA_REST_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      KAFKA_REST_HOST_NAME: kafka-rest
    restart: always
  schema-registry:
    image: confluentinc/cp-schema-registry:3.2.0
    container_name: schema-registry
    depends_on:
      - kafka
    ports:
      - '8081'
    environment:
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_LISTENERS: http://schema-registry:8081
    restart: always
  connect:
    image: confluentinc/cp-kafka-connect:3.2.0
    container_name: kafka-connect
    depends_on:
      - zookeeper
      - kafka
      - schema-registry
    ports:
      - "8083:8083"
    restart: always
    environment:
      CONNECT_BOOTSTRAP_SERVERS: 'kafka:9092'
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_ZOOKEEPER_CONNECT: "zookeeper:2181"
Now I have successfully exposed the Kafka container to my non-dockerized applications by setting the advertised.listeners property to PLAINTEXT://DOCKER_MACHINE_IP:9092, but as you can see I have also added further Confluent applications to extend my Kafka setup (Kafka REST, Schema Registry). Because of that advertised.listeners value, these can no longer connect to my Kafka instance.
I could change it to the proper container hostname instead, i.e. PLAINTEXT://kafka:9092, but then my applications outside of Docker can no longer reach the Kafka instance. Is there an easy way to solve this?
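For reference, a quick way to see exactly which address the broker hands out is to ask it for cluster metadata from the host, for example with kafkacat (assuming it is installed; 192.168.99.100 is the docker-machine IP used above):

$ kafkacat -b 192.168.99.100:9092 -L

Whatever broker address appears in that metadata is what every client uses after the initial bootstrap connection, which is why a single advertised listener cannot serve both the host and the containers at the same time.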
【Comments】:
【Answer 1】: Omar, you have probably solved your issue by now, but for future reference: Hans Jespersen's comment did the trick for me, even on Windows.
Open C:\Windows\System32\drivers\etc\hosts as administrator
and add the following line to expose the Kafka broker as localhost:
127.0.0.1 broker
My docker-compose.yml file then looks like this:
---
version: '2'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper
    hostname: zookeeper
    extra_hosts:
      - "moby:127.0.0.1"
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
  broker:
    image: confluentinc/cp-kafka
    hostname: broker
    extra_hosts:
      - "moby:127.0.0.1"
    depends_on:
      - zookeeper
    ports:
      - '9092:9092'
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://broker:9092'
      KAFKA_DEFAULT_REPLICATION_FACTOR: 1
  schema_registry:
    image: confluentinc/cp-schema-registry
    hostname: schema_registry
    # extra_hosts:
    #   - "moby:127.0.0.1"
    depends_on:
      - zookeeper
      - broker
    ports:
      - '8081:8081'
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema_registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
  kafka-rest:
    image: confluentinc/cp-kafka-rest
    container_name: kafka-rest
    extra_hosts:
      - "moby:127.0.0.1"
    depends_on:
      - zookeeper
      - broker
    ports:
      - '8082:8082'
    environment:
      KAFKA_REST_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_REST_LISTENERS: http://kafka-rest:8082
      KAFKA_REST_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      KAFKA_REST_HOST_NAME: kafka-rest
Alternatively, exposing my laptop's current IP address (obtained with ipconfig /all) also works, but the downside is that whenever my network changes I have to update the docker-compose.yml file as well.
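A quick way to verify the hosts entry is to produce and consume through the published port from the host machine, for example with the console tools that ship with Kafka (a sketch only; the topic name test is made up, and depending on your distribution the scripts may carry a .sh or .bat suffix):

$ kafka-console-producer --broker-list broker:9092 --topic test
$ kafka-console-consumer --bootstrap-server broker:9092 --topic test --from-beginning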
【Comments】:
This may have done the trick, but isn't the whole idea of Docker Compose that this workaround (adding the hostname to /etc/hosts on the Docker host) should not be necessary, since the names are resolved inside the Docker network? (Most likely Compose adds the proper hostnames to the /etc/hosts files of the running containers.)
Editing the hosts file is a hack. See rmoff.net/2018/08/02/kafka-listeners-explained

【Answer 2】: Assuming this setup is meant for your local development environment, here is a solution that works across the Docker network.
version: '2'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:3.2.0
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
    restart: always
    ports: ['2181:2181']
  kafka:
    image: confluentinc/cp-kafka:3.2.0
    depends_on:
      - zookeeper
    ports: ['9092:9092']   # publish the host-facing PLAINTEXT_HOST listener; the internal listener (kafka:29092) stays on the Docker network
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
    restart: always
  kafka-rest:
    image: confluentinc/cp-kafka-rest:3.2.0
    depends_on:
      - kafka
    ports: ['8082:8082']
    environment:
      KAFKA_REST_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_REST_LISTENERS: http://0.0.0.0:8082
      KAFKA_REST_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      KAFKA_REST_HOST_NAME: localhost
    restart: always
  schema-registry:
    image: confluentinc/cp-schema-registry:3.2.0
    depends_on:
      - kafka
    ports: ['8081:8081']
    environment:
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
    restart: always
  connect:
    image: confluentinc/cp-kafka-connect:3.2.0
    depends_on:
      - zookeeper
      - kafka
      - schema-registry
    ports: ['8083:8083']
    restart: always
    environment:
      CONNECT_BOOTSTRAP_SERVERS: 'kafka:29092'
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_ZOOKEEPER_CONNECT: "zookeeper:2181"
An updated version of this compose file is maintained here: https://github.com/confluentinc/examples/blob/5.3.1-post/cp-all-in-one/docker-compose.yml
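With the two listeners in place, applications on the host connect to localhost:9092 while everything inside the Compose network uses kafka:29092. As a rough sketch of how to exercise the stack (the topic name test is just an example, and kafkacat and curl are assumed to be installed on the host):

# produce from the host through the PLAINTEXT_HOST listener
$ kafkacat -b localhost:9092 -t test -P

# consume from inside the Compose network through the internal listener
$ docker-compose exec kafka kafka-console-consumer --bootstrap-server kafka:29092 --topic test --from-beginning

# the REST proxy and Schema Registry are reachable on their published ports
$ curl http://localhost:8082/topics
$ curl http://localhost:8081/subjects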
【Comments】:
No! Do not edit the hosts file. It works fine on Mac (and Windows) if you configure the environment variables correctly: rmoff.net/2018/08/02/kafka-listeners-explained

【Answer 3】: A single Kafka broker and a single Zookeeper:
version: '2.1'
services:
  zookeeper:
    image: wurstmeister/zookeeper
    hostname: zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"
    environment:
      ZOO_MY_ID: 1
      ZOO_PORT: 2181
      ZOO_SERVERS: server.1=zookeeper:2888:3888
    restart: always
    volumes:
      - ./zk-single-kafka-single/zookeeper/data:/data
      - ./zk-single-kafka-single/zookeeper/datalog:/datalog
  kafka:
    image: wurstmeister/kafka
    hostname: kafka
    container_name: kafka
    ports:
      - "9092:9092"
    environment:
      HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d' ' -f 2"
      KAFKA_ADVERTISED_LISTENERS: LISTENER_DOCKER_INTERNAL://kafka:9092,LISTENER_DOCKER_EXTERNAL://_HOSTNAME_COMMAND:9094
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: LISTENER_DOCKER_INTERNAL:PLAINTEXT,LISTENER_DOCKER_EXTERNAL:PLAINTEXT
      KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
      KAFKA_LISTENERS: LISTENER_DOCKER_INTERNAL://:9092,LISTENER_DOCKER_EXTERNAL://:9094
      KAFKA_INTER_BROKER_LISTENER_NAME: LISTENER_DOCKER_INTERNAL
      KAFKA_ADVERTISED_HOST_NAME: 172.19.0.1
      KAFKA_BROKER_ID: 1
      KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
    restart: always
    volumes:
      - ./zk-single-kafka-single/kafka/data:/var/lib/kafka/data
    depends_on:
      - zookeeper
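Once the stack is up, a quick smoke test can be run inside the broker container itself, for example (a sketch only; the topic name test is made up, and the standard Kafka shell scripts are assumed to be on the PATH inside the wurstmeister image, otherwise prefix them with the image's Kafka bin directory):

$ docker-compose exec kafka kafka-topics.sh --zookeeper zookeeper:2181 --create --topic test --partitions 1 --replication-factor 1
$ docker-compose exec kafka kafka-console-producer.sh --broker-list kafka:9092 --topic test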
【Comments】: