Add no_proxy in docker compose yaml for micro services (opea-project#267)

Signed-off-by: Zhou Ting <[email protected]>
moting9 authored and Spycsh committed Jun 19, 2024
1 parent 4d9b89b commit dcae62e
Showing 12 changed files with 36 additions and 0 deletions.
1 change: 1 addition & 0 deletions ChatQnA/docker/gaudi/README.md
@@ -93,6 +93,7 @@ Then run the command `docker images`, you will have the following 8 Docker Image
Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below.

```bash
export no_proxy=${your_no_proxy}
export http_proxy=${your_http_proxy}
export https_proxy=${your_http_proxy}
export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
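For context, `no_proxy` is a comma-separated list of hosts and IPs that should bypass the proxy. A minimal sketch of what `${your_no_proxy}` might expand to (the addresses below are illustrative, not taken from the repo):

```bash
# Hypothetical example: skip the proxy for loopback and the host's LAN IP,
# so the microservices can reach each other directly.
export no_proxy=localhost,127.0.0.1,192.168.1.100
```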
10 changes: 10 additions & 0 deletions ChatQnA/docker/gaudi/docker_compose.yaml
@@ -19,6 +19,7 @@ services:
ports:
- "6007:6007"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
REDIS_URL: ${REDIS_URL}
@@ -35,6 +36,7 @@ services:
- SYS_NICE
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HABANA_VISIBLE_DEVICES: all
@@ -49,6 +51,7 @@ services:
- "6000:6000"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
@@ -65,6 +68,7 @@ services:
- "7000:7000"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
REDIS_URL: ${REDIS_URL}
@@ -82,6 +86,7 @@ services:
- "./data:/data"
shm_size: 1g
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -97,6 +102,7 @@ services:
- "8000:8000"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TEI_RERANKING_ENDPOINT: ${TEI_RERANKING_ENDPOINT}
@@ -115,6 +121,7 @@ services:
volumes:
- "./data:/data"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -136,6 +143,7 @@ services:
- "9000:9000"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -161,6 +169,7 @@ services:
ports:
- "8888:8888"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -178,6 +187,7 @@ services:
ports:
- "5173:5173"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- CHAT_BASE_URL=${BACKEND_SERVICE_ENDPOINT}
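Note that this compose file mixes Compose's two equivalent `environment` syntaxes: mapping form (`no_proxy: ${no_proxy}`) for the backend services and list form (`- no_proxy=${no_proxy}`) for the mega-service and UI. A minimal standalone sketch of the two forms (service and image names are placeholders):

```yaml
services:
  backend:
    image: example/backend:latest # hypothetical image
    environment: # mapping syntax
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
  ui:
    image: example/ui:latest # hypothetical image
    environment: # list syntax, same effect
      - no_proxy=${no_proxy}
      - http_proxy=${http_proxy}
      - https_proxy=${https_proxy}
```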
1 change: 1 addition & 0 deletions CodeGen/docker/gaudi/README.md
@@ -51,6 +51,7 @@ Then run the command `docker images`, you will have the following 3 Docker image
Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below.

```bash
export no_proxy=${your_no_proxy}
export http_proxy=${your_http_proxy}
export https_proxy=${your_http_proxy}
export LLM_MODEL_ID="meta-llama/CodeLlama-7b-hf"
4 changes: 4 additions & 0 deletions CodeGen/docker/gaudi/docker_compose.yaml
@@ -13,6 +13,7 @@ services:
volumes:
- "./data:/data"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HABANA_VISIBLE_DEVICES: all
@@ -32,6 +33,7 @@ services:
- "9000:9000"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -48,6 +50,7 @@ services:
ports:
- "7778:7778"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -62,6 +65,7 @@ services:
ports:
- "5173:5173"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- BASIC_URL=${BACKEND_SERVICE_ENDPOINT}
1 change: 1 addition & 0 deletions CodeTrans/docker/gaudi/README.md
@@ -47,6 +47,7 @@ Then run the command `docker images`, you will have the following Docker Images:
Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below. Notice that the `LLM_MODEL_ID` indicates the LLM model used for TGI service.

```bash
export no_proxy=${your_no_proxy}
export http_proxy=${your_http_proxy}
export https_proxy=${your_http_proxy}
export LLM_MODEL_ID="HuggingFaceH4/mistral-7b-grok"
4 changes: 4 additions & 0 deletions CodeTrans/docker/gaudi/docker_compose.yaml
@@ -13,6 +13,7 @@ services:
volumes:
- "./data:/data"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HABANA_VISIBLE_DEVICES: all
@@ -30,6 +31,7 @@ services:
- "9000:9000"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -47,6 +49,7 @@ services:
ports:
- "7777:7777"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -61,6 +64,7 @@ services:
ports:
- "5173:5173"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- BASE_URL=${BACKEND_SERVICE_ENDPOINT}
1 change: 1 addition & 0 deletions CodeTrans/docker/xeon/README.md
@@ -55,6 +55,7 @@ Then run the command `docker images`, you will have the following Docker Images:
Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below. Notice that the `LLM_MODEL_ID` indicates the LLM model used for TGI service.

```bash
export no_proxy=${your_no_proxy}
export http_proxy=${your_http_proxy}
export https_proxy=${your_http_proxy}
export LLM_MODEL_ID="HuggingFaceH4/mistral-7b-grok"
4 changes: 4 additions & 0 deletions CodeTrans/docker/xeon/docker_compose.yaml
@@ -14,6 +14,7 @@ services:
- "./data:/data"
shm_size: 1g
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
HF_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
@@ -25,6 +26,7 @@ services:
- "9000:9000"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -42,6 +44,7 @@ services:
ports:
- "7777:7777"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -56,6 +59,7 @@ services:
ports:
- "5173:5173"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- BASE_URL=${BACKEND_SERVICE_ENDPOINT}
1 change: 1 addition & 0 deletions DocSum/docker/gaudi/README.md
@@ -58,6 +58,7 @@ Then run the command `docker images`, you will have the following Docker Images:
Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below.

```bash
export no_proxy=${your_no_proxy}
export http_proxy=${your_http_proxy}
export https_proxy=${your_http_proxy}
export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
4 changes: 4 additions & 0 deletions DocSum/docker/gaudi/docker_compose.yaml
@@ -11,6 +11,7 @@ services:
ports:
- "8008:80"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -31,6 +32,7 @@ services:
- "9000:9000"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -48,6 +50,7 @@ services:
ports:
- "8888:8888"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -62,6 +65,7 @@ services:
ports:
- "5173:5173"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- DOC_BASE_URL=${BACKEND_SERVICE_ENDPOINT}
1 change: 1 addition & 0 deletions DocSum/docker/xeon/README.md
@@ -59,6 +59,7 @@ Then run the command `docker images`, you will have the following Docker Images:
Since the `docker_compose.yaml` will consume some environment variables, you need to setup them in advance as below.

```bash
export no_proxy=${your_no_proxy}
export http_proxy=${your_http_proxy}
export https_proxy=${your_http_proxy}
export LLM_MODEL_ID="Intel/neural-chat-7b-v3-3"
4 changes: 4 additions & 0 deletions DocSum/docker/xeon/docker_compose.yaml
@@ -11,6 +11,7 @@ services:
ports:
- "8008:80"
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -28,6 +29,7 @@ services:
- "9000:9000"
ipc: host
environment:
no_proxy: ${no_proxy}
http_proxy: ${http_proxy}
https_proxy: ${https_proxy}
TGI_LLM_ENDPOINT: ${TGI_LLM_ENDPOINT}
@@ -45,6 +47,7 @@ services:
ports:
- "8888:8888"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- MEGA_SERVICE_HOST_IP=${MEGA_SERVICE_HOST_IP}
@@ -59,6 +62,7 @@ services:
ports:
- "5173:5173"
environment:
- no_proxy=${no_proxy}
- https_proxy=${https_proxy}
- http_proxy=${http_proxy}
- DOC_BASE_URL=${BACKEND_SERVICE_ENDPOINT}
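After exporting the variables, the substitution can be checked by rendering the effective configuration; this is standard Compose behavior, and the `grep` filter here is just for illustration:

```bash
# Prints the resolved compose file with ${no_proxy} etc. substituted;
# variables that are unset expand to empty strings rather than erroring.
docker compose -f docker_compose.yaml config | grep -i proxy
```

On older installations the equivalent command is `docker-compose -f docker_compose.yaml config`.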
