# docker-compose.yml — forked from confluentinc/ksql-recipes-try-it-at-home
# (93 lines / 2.86 KB in the upstream repository)
---
# Confluent Platform 5.0.1 demo stack for a KSQL recipe:
# single-node ZooKeeper + Kafka broker, a KSQL server, an interactive
# KSQL CLI, and a kafkacat-based dummy-data producer.
version: '2'
services:
  zookeeper:
    image: "confluentinc/cp-zookeeper:5.0.1"
    environment:
      ZOOKEEPER_CLIENT_PORT: "2181"
      ZOOKEEPER_TICK_TIME: "2000"

  kafka:
    image: "confluentinc/cp-enterprise-kafka:5.0.1"
    ports:
      # Host-facing listener; containers talk to the broker on kafka:29092.
      - "9092:9092"
    depends_on:
      - zookeeper
    environment:
      KAFKA_BROKER_ID: "1"
      KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: "PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT"
      KAFKA_INTER_BROKER_LISTENER_NAME: "PLAINTEXT"
      KAFKA_ADVERTISED_LISTENERS: "PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092"
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
      KAFKA_METRIC_REPORTERS: "io.confluent.metrics.reporter.ConfluentMetricsReporter"
      # Single-broker dev setup: replication factor 1 everywhere.
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1"
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: "100"
      CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: "kafka:29092"
      CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: "zookeeper:2181"
      CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: "1"
      CONFLUENT_METRICS_ENABLE: "true"
      CONFLUENT_SUPPORT_CUSTOMER_ID: "anonymous"

  # schema-registry:
  #   image: "confluentinc/cp-schema-registry:5.0.1"
  #   depends_on:
  #     - kafka
  #   ports:
  #     - "8081:8081"
  #   environment:
  #     SCHEMA_REGISTRY_HOST_NAME: schema-registry
  #     SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: "zookeeper:2181"

  ksql-server:
    image: "confluentinc/cp-ksql-server:5.0.1"
    depends_on:
      - kafka
      # - schema-registry
    environment:
      KSQL_BOOTSTRAP_SERVERS: "kafka:29092"
      KSQL_LISTENERS: "http://0.0.0.0:8088"
      # KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
      # KSQL_KSQL_SERVICE_ID: confluent_rmoff_01

  ksql-cli:
    image: "confluentinc/cp-ksql-cli:5.0.1"
    depends_on:
      - ksql-server
    # Keep the container alive so users can `docker-compose exec` into it
    # and launch the KSQL CLI interactively.
    entrypoint: /bin/sh
    tty: true

  # Two options for getting dummy data in here, one calls out to a
  # test generator on the web, another uses a local file.
  # Both running isn't a problem, you'll just get twice the throughput.
  kafkacat-from-file:
    image: "confluentinc/cp-kafkacat:latest"
    depends_on:
      - kafka
    command:
      - bash
      - -c
      # Replay ./data.json forever, one record every 0.5 s, into the
      # user_logons topic. `$$` escapes `$` from Compose interpolation so
      # awk sees a literal `$0`.
      # Optionally wait for the broker first:
      #   echo "Waiting for Kafka ⏳"
      #   cub kafka-ready -b kafka:29092 1 300 &&
      - |
        while [ 1 -eq 1 ]
        do awk '{print $$0;system("sleep 0.5");}' /data/data.json | \
          kafkacat -b kafka:29092 -P -t user_logons
        done
    volumes:
      # Mount the recipe directory so /data/data.json is available.
      - "$PWD:/data"

  # kafkacat-from-net:
  #   # kafkacat image doesn't have curl :(
  #   image: "confluentinc/cp-kafkacat:latest"
  #   depends_on:
  #     - kafka
  #   command:
  #     - bash
  #     - -c
  #     - |
  #       while [ 1 -eq 1 ]
  #       do curl "https://api.mockaroo.com/api/bce1b030?count=5000&key=ff7856d0" | \
  #         awk '{print $$0;system("sleep 0.5");}' | \
  #         kafkacat -b kafka:29092 -P -t user_logons
  #       done