Skip to content

Commit

Permalink
Fix DBCopilot Vulnerabilities (#3396)
Browse files Browse the repository at this point in the history
* Fix DBCopilot Vulnerabilities

* fix
  • Loading branch information
ricardrao authored Sep 19, 2024
1 parent 2bcd175 commit 8fd46c5
Show file tree
Hide file tree
Showing 9 changed files with 22 additions and 22 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ tags:
Preview: ""
name: llm_ingest_db_to_acs
display_name: LLM - SQL Datastore to ACS Pipeline
version: 0.0.95
version: 0.0.96
description: Single job pipeline to chunk data from AzureML sql data store, and create ACS embeddings index
settings:
default_compute: serverless
Expand Down Expand Up @@ -154,7 +154,7 @@ jobs:
output_grounding_context_file: ${{parent.outputs.db_context}}
environment_variables:
MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}}
component: "azureml:llm_dbcopilot_grounding:0.0.69"
component: "azureml:llm_dbcopilot_grounding:0.0.70"
type: command
generate_meta_embeddings:
type: command
Expand Down Expand Up @@ -221,7 +221,7 @@ jobs:
#########################################
db_sample_loading_generator:
type: command
component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.44"
component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.45"
resources:
instance_count: ${{parent.inputs.serverless_instance_count}}
instance_type: ${{parent.inputs.serverless_instance_type}}
Expand Down Expand Up @@ -336,5 +336,5 @@ jobs:
path: ${{parent.inputs.include_views}}
instruct_template:
path: ${{parent.inputs.instruct_template}}
component: "azureml:llm_dbcopilot_create_promptflow:0.0.69"
component: "azureml:llm_dbcopilot_create_promptflow:0.0.70"
type: command
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ tags:
Preview: ""
name: llm_ingest_db_to_faiss
display_name: LLM - SQL Datastore to FAISS Pipeline
version: 0.0.95
version: 0.0.96
description: Single job pipeline to chunk data from AzureML sql data store, and create FAISS embeddings index
settings:
default_compute: serverless
Expand Down Expand Up @@ -144,7 +144,7 @@ jobs:
output_grounding_context_file: ${{parent.outputs.db_context}}
environment_variables:
MANAGED_IDENTITY_ENABLED: ${{parent.inputs.managed_identity_enabled}}
component: "azureml:llm_dbcopilot_grounding:0.0.69"
component: "azureml:llm_dbcopilot_grounding:0.0.70"
type: command
generate_meta_embeddings:
type: command
Expand All @@ -154,7 +154,7 @@ jobs:
properties:
compute_specification:
automatic: true
component: "azureml:llm_rag_generate_embeddings:0.0.58"
component: "azureml:llm_rag_generate_embeddings:0.0.64"
inputs:
chunks_source:
type: uri_folder
Expand Down Expand Up @@ -209,7 +209,7 @@ jobs:
#########################################
db_sample_loading_generator:
type: command
component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.44"
component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.45"
resources:
instance_count: ${{parent.inputs.serverless_instance_count}}
instance_type: ${{parent.inputs.serverless_instance_type}}
Expand Down Expand Up @@ -320,5 +320,5 @@ jobs:
path: ${{parent.inputs.include_views}}
instruct_template:
path: ${{parent.inputs.instruct_template}}
component: "azureml:llm_dbcopilot_create_promptflow:0.0.69"
component: "azureml:llm_dbcopilot_create_promptflow:0.0.70"
type: command
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ $schema: https://azuremlschemas.azureedge.net/latest/pipelineComponent.schema.js
type: pipeline

name: llm_ingest_dbcopilot_acs_e2e
version: 0.0.64
version: 0.0.65
display_name: Data Ingestion for DB Data Output to ACS E2E Deployment
description: Single job pipeline to chunk data from AzureML DB Datastore and create acs embeddings index

Expand Down Expand Up @@ -141,7 +141,7 @@ jobs:
#########################################
db_meta_loading_generator:
type: command
component: "azureml:llm_dbcopilot_grounding:0.0.69"
component: "azureml:llm_dbcopilot_grounding:0.0.70"
resources:
instance_count: ${{parent.inputs.serverless_instance_count}}
instance_type: ${{parent.inputs.serverless_instance_type}}
Expand Down Expand Up @@ -213,7 +213,7 @@ jobs:
#########################################
db_sample_loading_generator:
type: command
component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.44"
component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.45"
resources:
instance_count: ${{parent.inputs.serverless_instance_count}}
instance_type: ${{parent.inputs.serverless_instance_type}}
Expand Down Expand Up @@ -275,7 +275,7 @@ jobs:
#########################################
endpoint_deployment_job:
type: command
component: "azureml:llm_dbcopilot_deploy_endpoint:0.0.45"
component: "azureml:llm_dbcopilot_deploy_endpoint:0.0.46"
resources:
instance_count: ${{parent.inputs.serverless_instance_count}}
instance_type: ${{parent.inputs.serverless_instance_type}}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ $schema: https://azuremlschemas.azureedge.net/latest/pipelineComponent.schema.js
type: pipeline

name: llm_ingest_dbcopilot_faiss_e2e
version: 0.0.64
version: 0.0.65
display_name: Data Ingestion for DB Data Output to FAISS E2E Deployment
description: Single job pipeline to chunk data from AzureML DB Datastore and create faiss embeddings index

Expand Down Expand Up @@ -131,7 +131,7 @@ jobs:
#########################################
db_meta_loading_generator:
type: command
component: "azureml:llm_dbcopilot_grounding:0.0.69"
component: "azureml:llm_dbcopilot_grounding:0.0.70"
resources:
instance_count: ${{parent.inputs.serverless_instance_count}}
instance_type: ${{parent.inputs.serverless_instance_type}}
Expand Down Expand Up @@ -201,7 +201,7 @@ jobs:
#########################################
db_sample_loading_generator:
type: command
component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.44"
component: "azureml:llm_dbcopilot_grounding_ground_samples:0.0.45"
resources:
instance_count: ${{parent.inputs.serverless_instance_count}}
instance_type: ${{parent.inputs.serverless_instance_type}}
Expand Down Expand Up @@ -259,7 +259,7 @@ jobs:
#########################################
endpoint_deployment_job:
type: command
component: "azureml:llm_dbcopilot_deploy_endpoint:0.0.45"
component: "azureml:llm_dbcopilot_deploy_endpoint:0.0.46"
resources:
instance_count: ${{parent.inputs.serverless_instance_count}}
instance_type: ${{parent.inputs.serverless_instance_type}}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ tags:
Preview: ""
name: llm_dbcopilot_create_promptflow
display_name: LLM - Create DBCopilot Prompt Flow
version: 0.0.69
version: 0.0.70
inputs:
index_name:
type: string
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ type: command
tags: {}
name: llm_dbcopilot_deploy_endpoint
display_name: LLM - DBCopilot Deploy Endpoint Component
version: 0.0.45
version: 0.0.46
inputs:
deployment_name:
type: string
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ type: command
tags:
Preview: ""
name: llm_dbcopilot_grounding
version: 0.0.69
version: 0.0.70
inputs:
asset_uri:
type: string
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ type: command
tags: {}
name: llm_dbcopilot_grounding_ground_samples
display_name: LLM - DBCopilot Grounding Ground Samples Component
version: 0.0.44
version: 0.0.45
inputs:
grounding_context:
type: uri_folder
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ RUN python -m pip install --upgrade pip && \
# Fix vulnerabilities
RUN /bin/bash -c "source /opt/miniconda/etc/profile.d/conda.sh && \
conda activate /opt/miniconda/envs/amlenv && \
pip install sqlparse==0.5.0 gunicorn==22.0.0 Werkzeug==3.0.3 azure-identity==1.16.1 certifi==2024.07.04 setuptools==70.0.0&& \
pip install sqlparse==0.5.0 gunicorn==22.0.0 Werkzeug==3.0.3 azure-identity==1.16.1 certifi==2024.07.04 --upgrade protobuf&& \
conda deactivate"

# For GUNICORN_CMD_ARGS, we need to set the timeout to be 0 so that the server will not timeout
Expand Down

0 comments on commit 8fd46c5

Please sign in to comment.