# docker-compose.yml
version: '3'
# docker-compose up --build
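# Other handy commands while developing (assuming the v1 `docker-compose` CLI;
# with the Compose v2 plugin, the same commands are available as `docker compose ...`):
#   docker-compose logs -f django   # follow the Django container's output
#   docker-compose exec django sh   # open a shell in the running Django container
#   docker-compose down             # stop and remove the stack's containers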
services:
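  # nginx reverse proxy: exposes port 80 on the host, mounts the project's
  # nginx.conf, and serves collected static files from the shared static_volume
  # at /static. It is assumed that ./nginx/nginx.conf (not shown here) proxies
  # the remaining requests on to the django service below.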
  reverse-proxy-container:
    build:
      context: ./nginx
    ports:
      - 80:80
    restart: always
    volumes:
      - "./nginx/nginx.conf:/etc/nginx/nginx.conf"
      - "static_volume:/static"
    environment:
      TZ: Asia/Seoul
    depends_on:
      - django
      # django:
      #   condition: service_healthy
    networks:
      - server
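
  # Django application: applies migrations, runs collectstatic, then serves the
  # project with gunicorn on port 8000. Because the project root is mounted at
  # /app, collectstatic output written to ./static (assuming STATIC_ROOT points
  # there) lands in the same host directory that backs static_volume for nginx.
  # The healthcheck below is what the commented-out `condition: service_healthy`
  # in the proxy's depends_on would rely on.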
  django:
    build:
      context: .
      dockerfile: Dockerfile
    # python manage.py runserver 0.0.0.0:8000
    # sh -c "python manage.py makemigrations --noinput && python manage.py migrate --noinput && python manage.py collectstatic --no-input && gunicorn --bind 0.0.0.0:8000 main.wsgi:application"
    command: >
      sh -c "python manage.py makemigrations --noinput && python manage.py migrate --noinput && python manage.py collectstatic --no-input && gunicorn --bind 0.0.0.0:8000 main.wsgi:application"
    ports:
      - "8000:8000"
    stdin_open: true
    tty: true
    volumes:
      - .:/app
    healthcheck:
      test: curl -f http://localhost:8000
      interval: 60s
      timeout: 3s
      retries: 1
    networks:
      - server
      - rmoff_kafka
    depends_on:
      - kafka
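
  # Redis instance, presumably the channel layer / message backing for the chat
  # features; the healthcheck simply pings it every 3 seconds.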
  redis-chat:
    image: redis
    restart: always
    container_name: redis-chat
    ports:
      - 6379:6379
    command: redis-server
    healthcheck:
      test: 'redis-cli -h 127.0.0.1 ping'
      interval: 3s
      timeout: 1s
      retries: 5
    depends_on:
      - django
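
  # Single-node ZooKeeper instance used by the Kafka broker below.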
  zoo:
    image: confluentinc/cp-zookeeper:7.3.2
    hostname: zoo
    container_name: zoo
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_SERVER_ID: 1
      ZOOKEEPER_SERVERS: zoo:2888:3888
    networks:
      - rmoff_kafka
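
  # Single Kafka broker with three advertised listeners:
  #   INTERNAL - kafka:19092, for containers on the rmoff_kafka network
  #   EXTERNAL - DOCKER_HOST_IP (default 127.0.0.1):9092, for clients on the host
  #   DOCKER   - host.docker.internal:29092, for containers outside this compose project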
  kafka:
    image: confluentinc/cp-kafka:7.3.2
    hostname: kafka
    container_name: kafka
    ports:
      - "9092:9092"
      - "29092:29092"
      - "9999:9999"
    environment:
      # Inside the Docker network: use the INTERNAL listener
      KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka:19092,EXTERNAL://${DOCKER_HOST_IP:-127.0.0.1}:9092,DOCKER://host.docker.internal:29092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT,EXTERNAL:PLAINTEXT,DOCKER:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
      KAFKA_ZOOKEEPER_CONNECT: "zoo:2181"
      KAFKA_BROKER_ID: 1
      KAFKA_LOG4J_LOGGERS: "kafka.controller=INFO,kafka.producer.async.DefaultEventHandler=INFO,state.change.logger=INFO"
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_JMX_PORT: 9999
      KAFKA_JMX_HOSTNAME: ${DOCKER_HOST_IP:-127.0.0.1}
      KAFKA_AUTHORIZER_CLASS_NAME: kafka.security.authorizer.AclAuthorizer
      KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    depends_on:
      - zoo
    networks:
      - rmoff_kafka
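
  # Standalone Python consumer built from ./consumer; judging by the
  # commented-out command it runs consumer.py, presumably against the broker's
  # INTERNAL listener (kafka:19092), but the actual entrypoint is defined in
  # consumer/Dockerfile.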
  python-kafka-consumer:
    build:
      context: ./consumer
      dockerfile: Dockerfile
    container_name: python-kafka-consumer
    # command: >
    #   sh -c "nohup python consumer.py"
    volumes:
      - ./consumer:/app
    stdin_open: true
    tty: true
    networks:
      - rmoff_kafka
    depends_on:
      - django
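
# static_volume is a bind mount of ./static on the host, which is how nginx and
# django end up sharing the same static files; the ./static directory must exist
# before `docker-compose up`, or creating the volume will fail. redis-data is
# declared but not currently attached to any service.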
volumes:
  redis-data:
  static_volume: # bind mount of ./static, shared between django and nginx
    driver: local
    driver_opts:
      type: none
      device: ${PWD}/static
      o: bind

networks:
  server:
    driver: bridge
  rmoff_kafka:
    driver: bridge
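
# Quick connectivity checks once the stack is up (run from the host):
#   docker exec redis-chat redis-cli ping
#   docker exec kafka kafka-topics --bootstrap-server kafka:19092 --list
#   curl -f http://localhost:8000    # same request the django healthcheck makes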