# docker-compose.yaml — Airflow orchestration stack + Elasticsearch/LLM app
# (GitHub page chrome and the 1–215 line-number gutter from the original
# scrape removed; the Compose definition follows.)
# NOTE(review): the top-level `version` key is obsolete in the Compose
# Specification and is ignored by modern `docker compose`; kept only for
# compatibility with legacy docker-compose v1 tooling.
version: '3.8'

# Shared settings merged into every Airflow service via `<<: *airflow-common`.
x-airflow-common:
  &airflow-common
  build: orchestration/.
  environment:
    &airflow-common-env
    AIRFLOW__CORE__EXECUTOR: LocalExecutor
    AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
    # Celery backend/broker are configured even though the executor is
    # LocalExecutor — only used if the executor is switched to CeleryExecutor.
    AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
    AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
    # Empty Fernet key: Airflow generates one per process; connection
    # passwords will NOT be readable across containers. Set a shared key
    # for production.
    AIRFLOW__CORE__FERNET_KEY: ''
    AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
    AIRFLOW__CORE__LOAD_EXAMPLES: 'false'
    AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth,airflow.api.auth.backend.session'
    # yamllint disable rule:line-length
    # Use simple http server on scheduler for health checks
    # See https://airflow.apache.org/docs/apache-airflow/stable/administration-and-deployment/logging-monitoring/check-health.html#scheduler-health-check-server
    # yamllint enable rule:line-length
    AIRFLOW__SCHEDULER__ENABLE_HEALTH_CHECK: 'true'
    _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
    AIRFLOW_CONFIG: '/opt/airflow/orchestration/config/airflow.cfg'
  volumes:
    - ${AIRFLOW_PROJ_DIR:-.}/orchestration/dags:/opt/airflow/orchestration/dags/
    - ${AIRFLOW_PROJ_DIR:-.}/orchestration/logs:/opt/airflow/orchestration/logs/
    - ${AIRFLOW_PROJ_DIR:-.}/orchestration/config:/opt/airflow/orchestration/config/
    - ${AIRFLOW_PROJ_DIR:-.}/orchestration/plugins:/opt/airflow/orchestration/plugins/
  user: "${AIRFLOW_UID:-50000}:0"
  depends_on:
    &airflow-common-depends-on
    redis:
      condition: service_healthy
    postgres:
      condition: service_healthy
services:
  # Metadata database for Airflow (also reachable on the host at 5432).
  postgres:
    image: postgres:13
    container_name: airflow_postgres
    environment:
      POSTGRES_USER: airflow
      POSTGRES_PASSWORD: airflow
      POSTGRES_DB: airflow
    volumes:
      - postgres-db-volume:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD", "pg_isready", "-U", "airflow"]
      interval: 10s
      retries: 5
      start_period: 5s
    ports:
      - "5432:5432"
    networks:
      - network
    # restart: on-failure

  # Web UI for Postgres administration on http://localhost:5050.
  pgadmin:
    image: dpage/pgadmin4
    restart: always
    container_name: pgAdmin
    environment:
      # FIX: the pgadmin4 image exits at startup unless
      # PGADMIN_DEFAULT_EMAIL is set alongside the password.
      - PGADMIN_DEFAULT_EMAIL=${PGADMIN_DEFAULT_EMAIL:-admin@example.com}
      # Default kept for backward compatibility; override via env in real use.
      - PGADMIN_DEFAULT_PASSWORD=${PGADMIN_DEFAULT_PASSWORD:-123456}
      - PGADMIN_LISTEN_PORT=5050
    ports:
      - "5050:5050"
    volumes:
      - pgvolume:/var/lib/pgadmin
    depends_on:
      - postgres
    networks:
      - network

  # Celery broker; only exposed inside the compose network (no host port).
  redis:
    image: redis:7.2-bookworm
    container_name: airflow_redis
    expose:
      - 6379
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 30s
      retries: 50
      start_period: 30s
    networks:
      - network
    # restart: on-failure
airflow-webserver:
<<: *airflow-common
command: webserver
container_name: airflow_webserver
ports:
- "8080:8080"
restart: always
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
networks:
- network
airflow-scheduler:
<<: *airflow-common
command: scheduler
container_name: airflow_scheduler
restart: always
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
mem_limit: 1024m # Set memory limit to 1GB
networks:
- network
airflow-worker:
<<: *airflow-common
command: celery worker
container_name: airflow_worker
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
networks:
- network
airflow-triggerer:
<<: *airflow-common
command: triggerer
container_name: airflow_triggerer
depends_on:
<<: *airflow-common-depends-on
airflow-init:
condition: service_completed_successfully
networks:
- network
airflow-init:
<<: *airflow-common
entrypoint: /bin/bash
command:
- -c
- |
mkdir -p /orchestration/sources/logs /orchestration/sources/dags /orchestration/sources/plugins
chown -R "${AIRFLOW_UID}:0" /orchestration/sources/{logs,dags,plugins}
exec /entrypoint airflow version
environment:
<<: *airflow-common-env
_AIRFLOW_DB_MIGRATE: 'true'
_AIRFLOW_WWW_USER_CREATE: 'true'
_AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
_AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
user: "0:0"
volumes:
- ${AIRFLOW_PROJ_DIR:-.}/orchestration:/sources
networks:
- network
# LLM
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:8.4.3
container_name: elasticsearch
environment:
- discovery.type=single-node
- xpack.security.enabled=false
ports:
- "9200:9200"
- "9300:9300"
networks:
- network
volumes:
- elastic-search-data:/usr/share/elasticsearch/data
app:
build: llm-app/.
container_name: llm_app
environment:
- HUGGINGFACE_KEY=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
volumes:
- ./llm-app/:/app
networks:
- network
depends_on:
- elasticsearch
ports:
- "8501:8501"
grafana:
image: grafana/grafana
container_name: grafana
ports:
- '3000:3000'
environment:
- GF_SECURITY_ADMIN_PASSWORD=${GRAFANA_ADMIN_PASSWORD:-admin}
volumes:
- grafana-storage:/var/lib/grafana
- grafana-plugins:/var/lib/grafana/plugins
networks:
- network
depends_on:
- postgres
networks:
network:
driver: bridge
volumes:
# mongo-data:
pgvolume:
postgres-db-volume:
grafana-storage:
grafana-plugins:
elastic-search-data: