diff --git a/helm-charts/common/speecht5/templates/deployment.yaml b/helm-charts/common/speecht5/templates/deployment.yaml index f29013ea..475a99b8 100644 --- a/helm-charts/common/speecht5/templates/deployment.yaml +++ b/helm-charts/common/speecht5/templates/deployment.yaml @@ -74,6 +74,9 @@ spec: hostPath: path: {{ .Values.global.modelUseHostPath }} type: Directory + {{- else if .Values.global.modelUsePV }} + persistentVolumeClaim: + claimName: {{ .Values.global.modelUsePV }} {{- else }} emptyDir: {} {{- end }} diff --git a/helm-charts/common/speecht5/values.yaml b/helm-charts/common/speecht5/values.yaml index 2d217ddb..9e614f73 100644 --- a/helm-charts/common/speecht5/values.yaml +++ b/helm-charts/common/speecht5/values.yaml @@ -84,4 +84,10 @@ global: http_proxy: "" https_proxy: "" no_proxy: "" + + # Choose where to save your downloaded models + # modelUseHostPath: Host directory path, this is good for a one-node test. + # modelUsePV: PersistentVolumeClaim (PVC) name, which is suitable for multinode deployment. + # If both are commented out, there will be no model cache directory and the model will be downloaded from HuggingFace.
modelUseHostPath: /mnt/opea-models + # modelUsePV: model-volume diff --git a/helm-charts/common/tei/templates/deployment.yaml b/helm-charts/common/tei/templates/deployment.yaml index 38122632..cbb2e3ca 100644 --- a/helm-charts/common/tei/templates/deployment.yaml +++ b/helm-charts/common/tei/templates/deployment.yaml @@ -78,6 +78,9 @@ spec: hostPath: path: {{ .Values.global.modelUseHostPath }} type: Directory + {{- else if .Values.global.modelUsePV }} + persistentVolumeClaim: + claimName: {{ .Values.global.modelUsePV }} {{- else }} emptyDir: {} {{- end }} diff --git a/helm-charts/common/tei/values.yaml b/helm-charts/common/tei/values.yaml index 54545d80..9ce013fd 100644 --- a/helm-charts/common/tei/values.yaml +++ b/helm-charts/common/tei/values.yaml @@ -82,6 +82,10 @@ global: http_proxy: "" https_proxy: "" no_proxy: "" - # set modelUseHostPath to host directory if you want to use hostPath volume for model storage - # comment out modeluseHostPath if you want to download the model from huggingface + + # Choose where to save your downloaded models + # modelUseHostPath: Host directory path, this is good for a one-node test. + # modelUsePV: PersistentVolumeClaim (PVC) name, which is suitable for multinode deployment. + # If both are commented out, there will be no model cache directory and the model will be downloaded from HuggingFace.
modelUseHostPath: /mnt/opea-models + # modelUsePV: model-volume diff --git a/helm-charts/common/teirerank/templates/deployment.yaml b/helm-charts/common/teirerank/templates/deployment.yaml index ff2c84a8..3a75f877 100644 --- a/helm-charts/common/teirerank/templates/deployment.yaml +++ b/helm-charts/common/teirerank/templates/deployment.yaml @@ -78,6 +78,9 @@ spec: hostPath: path: {{ .Values.global.modelUseHostPath }} type: Directory + {{- else if .Values.global.modelUsePV }} + persistentVolumeClaim: + claimName: {{ .Values.global.modelUsePV }} {{- else }} emptyDir: {} {{- end }} diff --git a/helm-charts/common/teirerank/values.yaml b/helm-charts/common/teirerank/values.yaml index b0062f1b..2b8e90fc 100644 --- a/helm-charts/common/teirerank/values.yaml +++ b/helm-charts/common/teirerank/values.yaml @@ -82,6 +82,10 @@ global: http_proxy: "" https_proxy: "" no_proxy: "" - # set modelUseHostPath to host directory if you want to use hostPath volume for model storage - # comment out modeluseHostPath if you want to download the model from huggingface + + # Choose where to save your downloaded models + # modelUseHostPath: Host directory path, this is good for a one-node test. + # modelUsePV: PersistentVolumeClaim (PVC) name, which is suitable for multinode deployment. + # If both are commented out, there will be no model cache directory and the model will be downloaded from HuggingFace.
modelUseHostPath: /mnt/opea-models + # modelUsePV: model-volume diff --git a/helm-charts/common/tgi/templates/deployment.yaml b/helm-charts/common/tgi/templates/deployment.yaml index 6db9162c..9dea3dc1 100644 --- a/helm-charts/common/tgi/templates/deployment.yaml +++ b/helm-charts/common/tgi/templates/deployment.yaml @@ -74,6 +74,9 @@ spec: hostPath: path: {{ .Values.global.modelUseHostPath }} type: Directory + {{- else if .Values.global.modelUsePV }} + persistentVolumeClaim: + claimName: {{ .Values.global.modelUsePV }} {{- else }} emptyDir: {} {{- end }} diff --git a/helm-charts/common/tgi/values.yaml b/helm-charts/common/tgi/values.yaml index 76dfcc1a..26893ecd 100644 --- a/helm-charts/common/tgi/values.yaml +++ b/helm-charts/common/tgi/values.yaml @@ -106,6 +106,10 @@ global: https_proxy: "" no_proxy: "" HUGGINGFACEHUB_API_TOKEN: "insert-your-huggingface-token-here" - # set modelUseHostPath to host directory if you want to use hostPath volume for model storage - # comment out modeluseHostPath if you want to download the model from huggingface + + # Choose where to save your downloaded models + # modelUseHostPath: Host directory path, this is good for a one-node test. + # modelUsePV: PersistentVolumeClaim (PVC) name, which is suitable for multinode deployment. + # If both are commented out, there will be no model cache directory and the model will be downloaded from HuggingFace.
modelUseHostPath: /mnt/opea-models + # modelUsePV: model-volume diff --git a/helm-charts/common/whisper/templates/deployment.yaml b/helm-charts/common/whisper/templates/deployment.yaml index d766c62d..8a9c1a1f 100644 --- a/helm-charts/common/whisper/templates/deployment.yaml +++ b/helm-charts/common/whisper/templates/deployment.yaml @@ -74,6 +74,9 @@ spec: hostPath: path: {{ .Values.global.modelUseHostPath }} type: Directory + {{- else if .Values.global.modelUsePV }} + persistentVolumeClaim: + claimName: {{ .Values.global.modelUsePV }} {{- else }} emptyDir: {} {{- end }} diff --git a/helm-charts/common/whisper/values.yaml b/helm-charts/common/whisper/values.yaml index 9a7bb6fb..8c207fb8 100644 --- a/helm-charts/common/whisper/values.yaml +++ b/helm-charts/common/whisper/values.yaml @@ -83,4 +83,10 @@ global: http_proxy: "" https_proxy: "" no_proxy: "" + + # Choose where to save your downloaded models + # modelUseHostPath: Host directory path, this is good for a one-node test. + # modelUsePV: PersistentVolumeClaim (PVC) name, which is suitable for multinode deployment. + # If both are commented out, there will be no model cache directory and the model will be downloaded from HuggingFace. modelUseHostPath: /mnt/opea-models + # modelUsePV: model-volume