From 62dbb6daee9e9db880e7fa696ef823e32c852998 Mon Sep 17 00:00:00 2001
From: "Chendi.Xue"
Date: Wed, 11 Sep 2024 20:06:53 -0500
Subject: [PATCH] [DocIndexRetriever] Add xeon test and fix gaudi test (#788)

Signed-off-by: Chendi.Xue
---
 .../intel/{ => cpu/xeon}/README.md            |   7 +-
 .../intel/cpu/xeon/compose.yaml               |  45 ++----
 .../docker_compose/intel/hpu/gaudi/README.md  | 126 +++++++++++++++++
 .../intel/hpu/gaudi/compose.yaml              |  34 ++---
 .../tests/test_compose_on_gaudi.sh            |  14 +-
 .../tests/test_compose_on_xeon.sh             | 131 ++++++++++++++++++
 6 files changed, 293 insertions(+), 64 deletions(-)
 rename DocIndexRetriever/docker_compose/intel/{ => cpu/xeon}/README.md (95%)
 create mode 100644 DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md
 create mode 100644 DocIndexRetriever/tests/test_compose_on_xeon.sh

diff --git a/DocIndexRetriever/docker_compose/intel/README.md b/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md
similarity index 95%
rename from DocIndexRetriever/docker_compose/intel/README.md
rename to DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md
index d2531001e..b921cc126 100644
--- a/DocIndexRetriever/docker_compose/intel/README.md
+++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/README.md
@@ -46,7 +46,7 @@ export host_ip="YOUR IP ADDR"
 export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
 export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
 export RERANK_MODEL_ID="BAAI/bge-reranker-base"
-export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:8090"
+export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:6006"
 export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
 export TGI_LLM_ENDPOINT="http://${host_ip}:8008"
 export REDIS_URL="redis://${host_ip}:6379"
@@ -58,9 +58,8 @@ export RERANK_SERVICE_HOST_IP=${host_ip}
 export LLM_SERVICE_HOST_IP=${host_ip}
 export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8000/v1/retrievaltool"
 export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep"
-export llm_hardware='cpu/xeon' #cpu/xeon, xpu, hpu/gaudi
-cd GenAIExamples/DocIndexRetriever/intel/${llm_hardware}/
-docker compose -f compose.yaml up -d
+cd GenAIExamples/DocIndexRetriever/docker_compose/intel/cpu/xeon/
+docker compose up -d
 ```

 ## 4. Validation
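Note on the endpoint change above: the Xeon quick-start now points `TEI_EMBEDDING_ENDPOINT` at host port 6006 instead of 8090. Once the stack is up, a one-line smoke test against TEI's `/embed` route can confirm the port is actually serving (a minimal sketch — it assumes the Xeon compose file maps the TEI embedding container to host port 6006 as the README implies, and that `curl` is available):

```bash
# Expect a JSON array of floats if the TEI embedding server is reachable.
curl -sf "http://${host_ip}:6006/embed" \
  -X POST \
  -H 'Content-Type: application/json' \
  -d '{"inputs":"smoke test"}' | head -c 200; echo
```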
diff --git a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml
index 5f66bb742..23399f9d0 100644
--- a/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml
+++ b/DocIndexRetriever/docker_compose/intel/cpu/xeon/compose.yaml
@@ -11,7 +11,7 @@ services:
       - "6379:6379"
       - "8001:8001"
   dataprep-redis-service:
-    image: opea/dataprep-redis:latest
+    image: ${REGISTRY:-opea}/dataprep-redis:${TAG:-latest}
     container_name: dataprep-redis-server
     depends_on:
       - redis-vector-db
@@ -24,8 +24,10 @@ services:
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      REDIS_URL: ${REDIS_URL}
+     REDIS_HOST: ${REDIS_HOST}
      INDEX_NAME: ${INDEX_NAME}
      TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
+     HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
   tei-embedding-service:
     image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
     container_name: tei-embedding-server
@@ -41,7 +43,7 @@ services:
       HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
     command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   embedding:
-    image: opea/embedding-tei:latest
+    image: ${REGISTRY:-opea}/embedding-tei:${TAG:-latest}
     container_name: embedding-tei-server
     ports:
       - "6000:6000"
@@ -54,12 +56,9 @@ services:
      https_proxy: ${https_proxy}
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
-     LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
-     LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
-     LANGCHAIN_PROJECT: "opea-embedding-service"
     restart: unless-stopped
   retriever:
-    image: opea/retriever-redis:latest
+    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -74,38 +73,14 @@ services:
      INDEX_NAME: ${INDEX_NAME}
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
-     LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
-     LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
-     LANGCHAIN_PROJECT: "opea-retriever-service"
     restart: unless-stopped
-
-  tei-reranking-service:
-    image: ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
-    container_name: tei-reranking-server
-    ports:
-      - "8808:80"
-    volumes:
-      - "./data:/data"
-    shm_size: 1g
-    environment:
-      no_proxy: ${no_proxy}
-      http_proxy: ${http_proxy}
-      https_proxy: ${https_proxy}
-      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
-      HF_HUB_DISABLE_PROGRESS_BARS: 1
-      HF_HUB_ENABLE_HF_TRANSFER: 0
-    command: --model-id ${RERANK_MODEL_ID} --auto-truncate
-
   reranking:
-    image: opea/reranking-tei:latest
+    image: ${REGISTRY:-opea}/reranking-tei:${TAG:-latest}
     container_name: reranking-tei-xeon-server
-    volumes:
-      - ${WORKDIR}/GenAIComps/comps/:/home/user/comps
-    depends_on:
-      - tei-reranking-service
     ports:
       - "8000:8000"
     ipc: host
+    entrypoint: python local_reranking.py
     environment:
       no_proxy: ${no_proxy}
       http_proxy: ${http_proxy}
@@ -114,19 +89,15 @@
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HF_HUB_DISABLE_PROGRESS_BARS: 1
      HF_HUB_ENABLE_HF_TRANSFER: 0
-     LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
-     LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
-     LANGCHAIN_PROJECT: "opea-reranking-service"
     restart: unless-stopped
   doc-index-retriever-server:
-    image: opea/doc-index-retriever:latest
+    image: ${REGISTRY:-opea}/doc-index-retriever:${TAG:-latest}
     container_name: doc-index-retriever-server
     depends_on:
       - redis-vector-db
       - tei-embedding-service
       - embedding
       - retriever
-      - tei-reranking-service
       - reranking
     ports:
       - "8889:8889"
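The `${REGISTRY:-opea}` / `${TAG:-latest}` image references introduced throughout this compose file fall back to the public `opea` images, so existing deployments keep working unchanged; CI, or anyone testing locally built images, can point the whole stack elsewhere simply by exporting both variables before bringing it up. A minimal sketch — the registry value here is purely an example:

```bash
# Hypothetical private registry; substitute your own.
export REGISTRY=registry.example.com/opea
export TAG=pr-788-test
docker compose up -d
```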
diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md
new file mode 100644
index 000000000..2cac81c8f
--- /dev/null
+++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/README.md
@@ -0,0 +1,126 @@
+# DocRetriever Application with Docker
+
+DocRetriever is the most widely adopted use case for leveraging different methodologies to match a user query against a set of free-text records. DocRetriever is essential to a RAG system: it bridges the knowledge gap by dynamically fetching relevant information from external sources, ensuring that generated responses remain factual and current. At the core of this architecture are vector databases, which enable efficient, semantic retrieval of information. These databases store data as vectors, allowing RAG to swiftly access the most pertinent documents or data points based on semantic similarity.
+
+## 1. Build images for the necessary microservices (optional after Docker image release)
+
+- Embedding TEI Image
+
+  ```bash
+  git clone https://github.com/opea-project/GenAIComps.git
+  cd GenAIComps
+  docker build -t opea/embedding-tei:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/embeddings/tei/langchain/Dockerfile .
+  ```
+
+- Retriever Vector store Image
+
+  ```bash
+  docker build -t opea/retriever-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/retrievers/redis/langchain/Dockerfile .
+  ```
+
+- Rerank TEI Image
+
+  ```bash
+  docker build -t opea/reranking-tei:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/reranks/tei/Dockerfile .
+  ```
+
+- Dataprep Image
+
+  ```bash
+  docker build -t opea/dataprep-redis:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f comps/dataprep/redis/langchain/Dockerfile .
+  ```
+
+## 2. Build Images for MegaService
+
+```bash
+cd ..
+git clone https://github.com/opea-project/GenAIExamples.git
+cd GenAIExamples/DocIndexRetriever
+docker build --no-cache -t opea/doc-index-retriever:latest --build-arg https_proxy=$https_proxy --build-arg http_proxy=$http_proxy -f ./Dockerfile .
+```
+
+## 3. Start all the service Docker containers
+
+```bash
+export host_ip="YOUR IP ADDR"
+export HUGGINGFACEHUB_API_TOKEN=${your_hf_api_token}
+export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
+export RERANK_MODEL_ID="BAAI/bge-reranker-base"
+export TEI_EMBEDDING_ENDPOINT="http://${host_ip}:8090"
+export TEI_RERANKING_ENDPOINT="http://${host_ip}:8808"
+export TGI_LLM_ENDPOINT="http://${host_ip}:8008"
+export REDIS_URL="redis://${host_ip}:6379"
+export INDEX_NAME="rag-redis"
+export MEGA_SERVICE_HOST_IP=${host_ip}
+export EMBEDDING_SERVICE_HOST_IP=${host_ip}
+export RETRIEVER_SERVICE_HOST_IP=${host_ip}
+export RERANK_SERVICE_HOST_IP=${host_ip}
+export LLM_SERVICE_HOST_IP=${host_ip}
+export BACKEND_SERVICE_ENDPOINT="http://${host_ip}:8000/v1/retrievaltool"
+export DATAPREP_SERVICE_ENDPOINT="http://${host_ip}:6007/v1/dataprep"
+cd GenAIExamples/DocIndexRetriever/docker_compose/intel/hpu/gaudi/
+docker compose up -d
+```
+
+## 4. Validation
+
+Add Knowledge Base via HTTP Links:
+
+```bash
+curl -X POST "http://${host_ip}:6007/v1/dataprep" \
+     -H "Content-Type: multipart/form-data" \
+     -F 'link_list=["https://opea.dev"]'
+
+# expected output
+{"status":200,"message":"Data preparation succeeded"}
+```
+
+Retrieval from the Knowledge Base:
+
+```bash
+curl http://${host_ip}:8889/v1/retrievaltool -X POST -H "Content-Type: application/json" -d '{
+     "text": "Explain the OPEA project?"
+     }'
+
+# expected output
+{"id":"354e62c703caac8c547b3061433ec5e8","reranked_docs":[{"id":"06d5a5cefc06cf9a9e0b5fa74a9f233c","text":"Close SearchsearchMenu WikiNewsCommunity Daysx-twitter linkedin github searchStreamlining implementation of enterprise-grade Generative AIEfficiently integrate secure, performant, and cost-effective Generative AI workflows into business value.TODAYOPEA..."}],"initial_query":"Explain the OPEA project?"}
+```
+
+## 5. Troubleshooting
+
+1. Check that all containers are alive
+
+   ```bash
+   # redis vector store
+   docker container logs redis-vector-db
+   # dataprep to redis microservice, input document files
+   docker container logs dataprep-redis-server
+
+   # embedding microservice
+   curl http://${host_ip}:6000/v1/embeddings \
+     -X POST \
+     -d '{"text":"Explain the OPEA project"}' \
+     -H 'Content-Type: application/json' > query
+   docker container logs embedding-tei-server
+
+   # if you used tei-gaudi
+   docker container logs tei-embedding-gaudi-server
+
+   # retriever microservice, input embedding output docs
+   curl http://${host_ip}:7000/v1/retrieval \
+     -X POST \
+     -d @query \
+     -H 'Content-Type: application/json' > rerank_query
+   docker container logs retriever-redis-server
+
+
+   # reranking microservice
+   curl http://${host_ip}:8000/v1/reranking \
+     -X POST \
+     -d @rerank_query \
+     -H 'Content-Type: application/json' > output
+   docker container logs reranking-tei-gaudi-server
+
+   # megaservice gateway
+   docker container logs doc-index-retriever-server
+   ```
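The troubleshooting section above checks containers one at a time; a short loop that flags any expected container that is not running can save a few round trips (a sketch — the names are the `container_name` values defined in the Gaudi compose file below):

```bash
# Report the status of every container this example is expected to start.
for name in redis-vector-db dataprep-redis-server tei-embedding-gaudi-server \
            embedding-tei-server retriever-redis-server \
            reranking-tei-gaudi-server doc-index-retriever-server; do
  if [ -n "$(docker ps -q --filter "name=^/${name}$")" ]; then
    echo "OK       $name"
  else
    echo "MISSING  $name"
  fi
done
```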
diff --git a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml
index 76bd24f35..76823db7b 100644
--- a/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml
+++ b/DocIndexRetriever/docker_compose/intel/hpu/gaudi/compose.yaml
@@ -9,13 +9,14 @@ services:
     image: redis/redis-stack:7.2.0-v9
     container_name: redis-vector-db
     ports:
-      - "16379:6379"
+      - "6379:6379"
       - "8001:8001"
   dataprep-redis-service:
-    image: opea/dataprep-on-ray-redis:latest
+    image: ${REGISTRY:-opea}/dataprep-redis:${TAG:-latest}
     container_name: dataprep-redis-server
     depends_on:
       - redis-vector-db
+      - tei-embedding-service
     ports:
       - "6007:6007"
     environment:
@@ -24,8 +25,10 @@ services:
      https_proxy: ${https_proxy}
      REDIS_URL: ${REDIS_URL}
      INDEX_NAME: ${INDEX_NAME}
+     TEI_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
+     HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
   tei-embedding-service:
-    image: ghcr.io/huggingface/tei-gaudi:latest
+    image: ${REGISTRY:-opea}/tei-gaudi:${TAG:-latest}
     container_name: tei-embedding-gaudi-server
     ports:
       - "8090:80"
@@ -39,12 +42,14 @@ services:
      no_proxy: ${no_proxy}
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
-     HABANA_VISIBLE_DEVICES: all
+     HABANA_VISIBLE_DEVICES: ${tei_embedding_devices:-all}
      OMPI_MCA_btl_vader_single_copy_mechanism: none
      MAX_WARMUP_SEQUENCE_LENGTH: 512
-    command: --model-id ${EMBEDDING_MODEL_ID}
+     INIT_HCCL_ON_ACQUIRE: 0
+     ENABLE_EXPERIMENTAL_FLAGS: true
+    command: --model-id ${EMBEDDING_MODEL_ID} --auto-truncate
   embedding:
-    image: opea/embedding-tei:latest
+    image: ${REGISTRY:-opea}/embedding-tei:${TAG:-latest}
     container_name: embedding-tei-server
     ports:
       - "6000:6000"
@@ -56,12 +61,9 @@ services:
      http_proxy: ${http_proxy}
      https_proxy: ${https_proxy}
      TEI_EMBEDDING_ENDPOINT: ${TEI_EMBEDDING_ENDPOINT}
-     LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
-     LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
-     LANGCHAIN_PROJECT: "opea-embedding-service"
     restart: unless-stopped
   retriever:
-    image: opea/retriever-redis:latest
+    image: ${REGISTRY:-opea}/retriever-redis:${TAG:-latest}
     container_name: retriever-redis-server
     depends_on:
       - redis-vector-db
@@ -74,13 +76,10 @@ services:
      https_proxy: ${https_proxy}
      REDIS_URL: ${REDIS_URL}
      INDEX_NAME: ${INDEX_NAME}
-     LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
-     LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
-     LANGCHAIN_PROJECT: "opea-retriever-service"
     restart: unless-stopped
   reranking:
-    image: opea/reranking-tei:latest
-    container_name: reranking-tei-server
+    image: ${REGISTRY:-opea}/reranking-tei:${TAG:-latest}
+    container_name: reranking-tei-gaudi-server
     ports:
       - "8000:8000"
     ipc: host
@@ -93,12 +92,9 @@ services:
      HUGGINGFACEHUB_API_TOKEN: ${HUGGINGFACEHUB_API_TOKEN}
      HF_HUB_DISABLE_PROGRESS_BARS: 1
      HF_HUB_ENABLE_HF_TRANSFER: 0
-     LANGCHAIN_API_KEY: ${LANGCHAIN_API_KEY}
-     LANGCHAIN_TRACING_V2: ${LANGCHAIN_TRACING_V2}
-     LANGCHAIN_PROJECT: "opea-reranking-service"
     restart: unless-stopped
   doc-index-retriever-server:
-    image: opea/doc-index-retriever:latest
+    image: ${REGISTRY:-opea}/doc-index-retriever:${TAG:-latest}
     container_name: doc-index-retriever-server
     depends_on:
       - redis-vector-db
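The `HABANA_VISIBLE_DEVICES: ${tei_embedding_devices:-all}` change keeps the old claim-everything behavior by default, but lets a shared test host pin the TEI embedding server to specific Gaudi cards. For example (a sketch — device indices depend on the host):

```bash
# Expose only the first two Gaudi devices to the embedding server.
export tei_embedding_devices=0,1
docker compose up -d tei-embedding-service
```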
diff --git a/DocIndexRetriever/tests/test_compose_on_gaudi.sh b/DocIndexRetriever/tests/test_compose_on_gaudi.sh
index 9be5c5395..f5247a191 100644
--- a/DocIndexRetriever/tests/test_compose_on_gaudi.sh
+++ b/DocIndexRetriever/tests/test_compose_on_gaudi.sh
@@ -4,6 +4,11 @@

 set -e
 echo "IMAGE_REPO=${IMAGE_REPO}"
+IMAGE_TAG=${IMAGE_TAG:-"latest"}
+echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
+echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
+# export REGISTRY=${IMAGE_REPO}
+# export TAG=${IMAGE_TAG}

 WORKPATH=$(dirname "$PWD")
 LOG_PATH="$WORKPATH/tests"
@@ -19,7 +24,7 @@ function build_docker_images() {
     docker build -t opea/embedding-tei:latest -f comps/embeddings/tei/langchain/Dockerfile .
     docker build -t opea/retriever-redis:latest -f comps/retrievers/redis/langchain/Dockerfile .
     docker build -t opea/reranking-tei:latest -f comps/reranks/tei/Dockerfile .
-    docker build -t opea/dataprep-on-ray-redis:latest -f comps/dataprep/redis/langchain_ray/Dockerfile .
+    docker build -t opea/dataprep-redis:latest -f comps/dataprep/redis/langchain/Dockerfile .

     docker pull ghcr.io/huggingface/tgi-gaudi:latest
     docker pull redis/redis-stack:7.2.0-v9
@@ -36,7 +41,7 @@ function start_services() {
     export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:8090"
     export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808"
     export TGI_LLM_ENDPOINT="http://${ip_address}:8008"
-    export REDIS_URL="redis://${ip_address}:16379"
+    export REDIS_URL="redis://${ip_address}:6379"
     export INDEX_NAME="rag-redis"
     export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
     export MEGA_SERVICE_HOST_IP=${ip_address}
@@ -44,7 +49,6 @@ function start_services() {
     export RETRIEVER_SERVICE_HOST_IP=${ip_address}
     export RERANK_SERVICE_HOST_IP=${ip_address}
    export LLM_SERVICE_HOST_IP=${ip_address}
-    export RERANK_SERVICE_PORT=18000

     # Start Docker Containers
     docker compose up -d
@@ -99,7 +103,7 @@ function validate_megaservice() {

 function stop_docker() {
     cd $WORKPATH/docker_compose/intel/hpu/gaudi
-    container_list=$(cat docker_compose.yaml | grep container_name | cut -d':' -f2)
+    container_list=$(cat compose.yaml | grep container_name | cut -d':' -f2)
     for container_name in $container_list; do
         cid=$(docker ps -aq --filter "name=$container_name")
         echo "Stopping container $container_name"
@@ -111,6 +115,8 @@ function main() {

     stop_docker
     build_docker_images
+    echo "Dump current docker ps"
+    docker ps
     start_time=$(date +%s)
     start_services
     end_time=$(date +%s)
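For reference, both test scripts are driven purely by environment variables; a typical local invocation might look like the following (a sketch — the token and repo values are placeholders):

```bash
export HUGGINGFACEHUB_API_TOKEN=hf_xxx   # placeholder HuggingFace token
export IMAGE_REPO=opea                   # or your own registry namespace
cd DocIndexRetriever/tests
bash test_compose_on_gaudi.sh            # on a Gaudi host
bash test_compose_on_xeon.sh             # on a Xeon host
```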
diff --git a/DocIndexRetriever/tests/test_compose_on_xeon.sh b/DocIndexRetriever/tests/test_compose_on_xeon.sh
new file mode 100644
index 000000000..d061b8d20
--- /dev/null
+++ b/DocIndexRetriever/tests/test_compose_on_xeon.sh
@@ -0,0 +1,131 @@
+#!/bin/bash
+# Copyright (C) 2024 Intel Corporation
+# SPDX-License-Identifier: Apache-2.0
+
+set -e
+echo "IMAGE_REPO=${IMAGE_REPO}"
+IMAGE_TAG=${IMAGE_TAG:-"latest"}
+echo "REGISTRY=IMAGE_REPO=${IMAGE_REPO}"
+echo "TAG=IMAGE_TAG=${IMAGE_TAG}"
+# export REGISTRY=${IMAGE_REPO}
+# export TAG=${IMAGE_TAG}
+
+WORKPATH=$(dirname "$PWD")
+LOG_PATH="$WORKPATH/tests"
+ip_address=$(hostname -I | awk '{print $1}')
+
+function build_docker_images() {
+    cd $WORKPATH/docker_image_build
+    if [ ! -d "GenAIComps" ] ; then
+        git clone https://github.com/opea-project/GenAIComps.git && cd GenAIComps && git checkout "${opea_branch:-"main"}" && cd ../
+    fi
+    cd GenAIComps
+
+    docker build -t opea/embedding-tei:latest -f comps/embeddings/tei/langchain/Dockerfile .
+    docker build -t opea/retriever-redis:latest -f comps/retrievers/redis/langchain/Dockerfile .
+    docker build -t opea/reranking-tei:latest -f comps/reranks/tei/Dockerfile .
+    docker build -t opea/dataprep-redis:latest -f comps/dataprep/redis/langchain/Dockerfile .
+
+    docker pull ghcr.io/huggingface/text-embeddings-inference:cpu-1.5
+    docker pull redis/redis-stack:7.2.0-v9
+
+    cd $WORKPATH/
+    docker build -t opea/doc-index-retriever:latest -f ./Dockerfile .
+}
+
+function start_services() {
+    cd $WORKPATH/docker_compose/intel/cpu/xeon
+    export EMBEDDING_MODEL_ID="BAAI/bge-base-en-v1.5"
+    export RERANK_MODEL_ID="BAAI/bge-reranker-base"
+    export TEI_EMBEDDING_ENDPOINT="http://${ip_address}:6006"
+    export TEI_RERANKING_ENDPOINT="http://${ip_address}:8808"
+    export TGI_LLM_ENDPOINT="http://${ip_address}:8008"
+    export REDIS_URL="redis://${ip_address}:6379"
+    export INDEX_NAME="rag-redis"
+    export HUGGINGFACEHUB_API_TOKEN=${HUGGINGFACEHUB_API_TOKEN}
+    export MEGA_SERVICE_HOST_IP=${ip_address}
+    export EMBEDDING_SERVICE_HOST_IP=${ip_address}
+    export RETRIEVER_SERVICE_HOST_IP=${ip_address}
+    export RERANK_SERVICE_HOST_IP=${ip_address}
+    export LLM_SERVICE_HOST_IP=${ip_address}
+
+    # Start Docker Containers
+    docker compose up -d
+    sleep 20
+}
+
+function validate() {
+    local CONTENT="$1"
+    local EXPECTED_RESULT="$2"
+    local SERVICE_NAME="$3"
+
+    if echo "$CONTENT" | grep -q "$EXPECTED_RESULT"; then
+        echo "[ $SERVICE_NAME ] Content is as expected: $CONTENT."
+        echo 0
+    else
+        echo "[ $SERVICE_NAME ] Content does not match the expected result: $CONTENT"
+        echo 1
+    fi
+}
+
+function validate_megaservice() {
+    echo "Testing DataPrep Service"
+    local CONTENT=$(curl -X POST "http://${ip_address}:6007/v1/dataprep" \
+        -H "Content-Type: multipart/form-data" \
+        -F 'link_list=["https://opea.dev"]')
+    local EXIT_CODE=$(validate "$CONTENT" "Data preparation succeeded" "dataprep-redis-service-xeon")
+    echo "$EXIT_CODE"
+    local EXIT_CODE="${EXIT_CODE:0-1}"
+    echo "return value is $EXIT_CODE"
+    if [ "$EXIT_CODE" == "1" ]; then
+        docker logs dataprep-redis-server | tee -a ${LOG_PATH}/dataprep-redis-service-xeon.log
+        return 1
+    fi
+
+    # Curl the Mega Service
+    echo "Testing retriever service"
+    local CONTENT=$(curl http://${ip_address}:8889/v1/retrievaltool -X POST -H "Content-Type: application/json" -d '{
+        "text": "Explain the OPEA project?"
+    }')
+    local EXIT_CODE=$(validate "$CONTENT" "OPEA" "doc-index-retriever-service-xeon")
+    echo "$EXIT_CODE"
+    local EXIT_CODE="${EXIT_CODE:0-1}"
+    echo "return value is $EXIT_CODE"
+    if [ "$EXIT_CODE" == "1" ]; then
+        docker logs tei-embedding-server | tee -a ${LOG_PATH}/doc-index-retriever-service-xeon.log
+        docker logs retriever-redis-server | tee -a ${LOG_PATH}/doc-index-retriever-service-xeon.log
+        docker logs reranking-tei-xeon-server | tee -a ${LOG_PATH}/doc-index-retriever-service-xeon.log
+        docker logs doc-index-retriever-server | tee -a ${LOG_PATH}/doc-index-retriever-service-xeon.log
+        exit 1
+    fi
+}
+
+function stop_docker() {
+    cd $WORKPATH/docker_compose/intel/cpu/xeon
+    container_list=$(cat compose.yaml | grep container_name | cut -d':' -f2)
+    for container_name in $container_list; do
+        cid=$(docker ps -aq --filter "name=$container_name")
+        echo "Stopping container $container_name"
+        if [[ ! -z "$cid" ]]; then docker rm $cid -f && sleep 1s; fi
+    done
+}
+
+function main() {
+
+    stop_docker
+    build_docker_images
+    echo "Dump current docker ps"
+    docker ps
+    start_time=$(date +%s)
+    start_services
+    end_time=$(date +%s)
+    duration=$((end_time-start_time))
+    echo "Mega service start duration is $duration s"
+    validate_megaservice
+
+    stop_docker
+    echo y | docker system prune
+
+}
+
+main
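One subtlety worth noting in `validate_megaservice`: `validate` prints its log line and then echoes `0` or `1`, so the caller captures the whole output and keeps only the final character with `${EXIT_CODE:0-1}` — a bash substring whose offset is the arithmetic expression `0-1`, i.e. the last character (command substitution strips trailing newlines, so that character is the status digit). A standalone demo of the pattern:

```bash
#!/bin/bash
# Mimic validate(): log lines first, status digit last.
out="$(echo '[ svc ] Content is as expected'; echo 0)"
status="${out:0-1}"      # offset 0-1 == -1 -> last character of $out
echo "status=${status}"  # prints: status=0
```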