-
Notifications
You must be signed in to change notification settings - Fork 16
/
Copy pathdocker-compose.yaml
150 lines (143 loc) · 5.43 KB
/
docker-compose.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
---
# Local Kafka development stack: ZooKeeper, Kafka broker, Schema Registry,
# Redpanda Console (UI), Kafka Connect (Neo4j + Elasticsearch + Protobuf
# plugins), Elasticsearch, and Neo4j.
version: '3'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:7.2.2
    container_name: zookeeper
    hostname: zookeeper
    environment:
      # NOTE(review): removed a stray SCHEMA_REGISTRY_HOST_NAME entry here —
      # that is a Schema Registry variable (set correctly on the
      # schema-registry service below) and is ignored by ZooKeeper.
      ZOOKEEPER_CLIENT_PORT: "2181"
      ZOOKEEPER_TICK_TIME: "2000"

  kafka:
    image: confluentinc/cp-kafka:7.2.2
    container_name: kafka
    ports:
      # "`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-
      # An important note about accessing Kafka from clients on other machines:
      # -----------------------------------------------------------------------
      #
      # The config used here exposes port 29092 for _external_ connections to the broker
      # i.e. those from _outside_ the docker network. This could be from the host machine
      # running docker, or maybe further afield if you've got a more complicated setup.
      # If the latter is true, you will need to change the value 'localhost' in
      # KAFKA_ADVERTISED_LISTENERS to one that is resolvable to the docker host from those
      # remote clients
      #
      # For connections _internal_ to the docker network, such as from other services
      # and components, use kafka:9092.
      #
      # See https://rmoff.net/2018/08/02/kafka-listeners-explained/ for details
      # https://www.confluent.io/blog/kafka-client-cannot-connect-to-broker-on-aws-on-docker-etc/
      # https://www.baeldung.com/kafka-docker-connection
      # "`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-'"`-._,-
      - "29092:29092"
    depends_on:
      - zookeeper
    environment:
      KAFKA_LISTENERS: INTERNAL://0.0.0.0:9092,EXTERNAL_SAME_HOST://0.0.0.0:29092
      KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:9092,EXTERNAL_SAME_HOST://localhost:29092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL_SAME_HOST:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
      KAFKA_BROKER_ID: "1"
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      # Single-node cluster: all replication factors must be 1.
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: "1"
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: "1"
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: "1"
      # NOTE(review): KAFKA_SCHEMA_REGISTRY_URL is not a standard broker
      # setting — the broker does not talk to Schema Registry. Kept as-is in
      # case external tooling reads it; confirm before removing.
      KAFKA_SCHEMA_REGISTRY_URL: "schema-registry:8081"

  schema-registry:
    image: confluentinc/cp-schema-registry:7.2.2
    container_name: schema-registry
    restart: always
    depends_on:
      - zookeeper
      # FIX: Schema Registry bootstraps against kafka:9092 (see
      # SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS), so it must also wait
      # for the broker, not only ZooKeeper.
      - kafka
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: "kafka:9092"
      SCHEMA_REGISTRY_LISTENERS: "http://0.0.0.0:8081"
      JVM_OPTS: "-Xms256M -Xmx256M"
    ports:
      - "8081:8081"

  # Redpanda Console — web UI for browsing topics/schemas at localhost:8080.
  console:
    image: vectorized/console:v2.0.2
    container_name: console
    restart: on-failure
    hostname: console
    volumes:
      - ./console-config.yaml:/etc/console/console-config.yaml
    ports:
      - "8080:8080"
    entrypoint: ./console --config.filepath=/etc/console/console-config.yaml
    depends_on:
      - kafka
      - schema-registry

  connect:
    image: confluentinc/cp-kafka-connect:7.2.2
    container_name: kafka-connect
    hostname: connect
    depends_on:
      - zookeeper
      - kafka
      - schema-registry
      - elasticsearch
    ports:
      - "8083:8083"
    environment:
      CONNECT_BOOTSTRAP_SERVERS: "kafka:9092"
      CONNECT_REST_PORT: "8083"
      CONNECT_GROUP_ID: kafka-connect
      CONNECT_CONFIG_STORAGE_TOPIC: _connect-configs
      CONNECT_OFFSET_STORAGE_TOPIC: _connect-offsets
      CONNECT_STATUS_STORAGE_TOPIC: _connect-status
      # Keys are plain strings; values are Protobuf, resolved via Schema Registry.
      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
      CONNECT_VALUE_CONVERTER: io.confluent.connect.protobuf.ProtobufConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
      CONNECT_INTERNAL_KEY_CONVERTER: "org.apache.kafka.connect.storage.StringConverter"
      CONNECT_INTERNAL_VALUE_CONVERTER: "io.confluent.connect.protobuf.ProtobufConverter"
      CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect"
      CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
      CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
      # Single-node cluster: internal topics replicated once.
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
      CONNECT_PLUGIN_PATH: /usr/share/java,/usr/share/confluent-hub-components,/data/connect-jars
    # Install connector plugins at startup, then launch the worker in the
    # background; `sleep infinity` keeps PID 1 alive for the container.
    command:
      - bash
      - -c
      - |
        echo "Installing Connector"
        confluent-hub install neo4j/kafka-connect-neo4j:2.0.2 --no-prompt
        confluent-hub install confluentinc/kafka-connect-elasticsearch:11.1.10 --no-prompt
        confluent-hub install confluentinc/kafka-connect-protobuf-converter:7.1.1 --no-prompt
        #
        echo "Launching Kafka Connect worker"
        /etc/confluent/docker/run &
        #
        sleep infinity

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch-oss:7.10.2
    container_name: elasticsearch
    hostname: elasticsearch
    environment:
      - discovery.type=single-node
      - ES_JAVA_OPTS=-Xms512m -Xmx512m
    ports:
      - "9200:9200"   # HTTP API
      - "9300:9300"   # transport

  neo4j:
    image: neo4j:4.4.6
    hostname: neo4j
    container_name: neo4j
    ports:
      - "7474:7474"   # browser UI
      - "7687:7687"   # bolt
    depends_on:
      - kafka
    volumes:
      - $PWD/neo4j/plugins:/plugins
      - $PWD/neo4j/conf:/var/lib/neo4j/conf/
    environment:
      NEO4J_ACCEPT_LICENSE_AGREEMENT: 'yes'
      # WARNING: authentication disabled — local development only.
      NEO4J_AUTH: none
      NEO4JLABS_PLUGINS: '["apoc"]'