
Commit 7cba435: fix conflict
WayneCao committed Oct 12, 2024
2 parents a5be94d + c6b74da commit 7cba435
Showing 55 changed files with 1,182 additions and 371 deletions.
7 changes: 6 additions & 1 deletion api/.env.example
@@ -39,7 +39,7 @@ DB_DATABASE=dify

# Storage configuration
# used to store uploaded files, private keys...
# storage type: local, s3, azure-blob, google-storage, tencent-cos, huawei-obs, volcengine-tos, baidu-obs
# storage type: local, s3, azure-blob, google-storage, tencent-cos, huawei-obs, volcengine-tos, baidu-obs, supabase
STORAGE_TYPE=local
STORAGE_LOCAL_PATH=storage
S3_USE_AWS_MANAGED_IAM=false
@@ -99,6 +99,11 @@ VOLCENGINE_TOS_ACCESS_KEY=your-access-key
VOLCENGINE_TOS_SECRET_KEY=your-secret-key
VOLCENGINE_TOS_REGION=your-region

# Supabase Storage Configuration
SUPABASE_BUCKET_NAME=your-bucket-name
SUPABASE_API_KEY=your-access-key
SUPABASE_URL=your-server-url

# CORS configuration
WEB_API_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
CONSOLE_CORS_ALLOW_ORIGINS=http://127.0.0.1:3000,*
19 changes: 19 additions & 0 deletions api/configs/middleware/__init__.py
@@ -12,6 +12,7 @@
from configs.middleware.storage.google_cloud_storage_config import GoogleCloudStorageConfig
from configs.middleware.storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
from configs.middleware.storage.oci_storage_config import OCIStorageConfig
from configs.middleware.storage.supabase_storage_config import SupabaseStorageConfig
from configs.middleware.storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
from configs.middleware.storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig
@@ -191,6 +192,22 @@ def BROKER_USE_SSL(self) -> bool:
return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False


class InternalTestConfig(BaseSettings):
    """
    Configuration settings for internal testing
    """

    AWS_SECRET_ACCESS_KEY: Optional[str] = Field(
        description="Internal test AWS secret access key",
        default=None,
    )

    AWS_ACCESS_KEY_ID: Optional[str] = Field(
        description="Internal test AWS access key ID",
        default=None,
    )


class MiddlewareConfig(
# place the configs in alphabet order
CeleryConfig,
@@ -206,6 +223,7 @@ class MiddlewareConfig(
HuaweiCloudOBSStorageConfig,
OCIStorageConfig,
S3StorageConfig,
SupabaseStorageConfig,
TencentCloudCOSStorageConfig,
VolcengineTOSStorageConfig,
# configs of vdb and vdb providers
@@ -224,5 +242,6 @@ class MiddlewareConfig(
TiDBVectorConfig,
WeaviateConfig,
ElasticsearchConfig,
InternalTestConfig,
):
pass
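
For context on the two additions above: MiddlewareConfig composes dozens of per-service settings classes through multiple inheritance, so registering SupabaseStorageConfig and InternalTestConfig as bases is all it takes for their fields to be read from the environment alongside everything else. A minimal sketch of the pattern, assuming pydantic-settings v2 (the class and variable names below are illustrative, not Dify's):

# Illustrative sketch, not part of this commit: layered BaseSettings classes
# merged into one config object via multiple inheritance.
import os

from pydantic import Field
from pydantic_settings import BaseSettings


class StorageSettingsA(BaseSettings):
    SUPABASE_URL: str | None = Field(default=None)


class StorageSettingsB(BaseSettings):
    SUPABASE_BUCKET_NAME: str | None = Field(default=None)


class CombinedConfig(StorageSettingsA, StorageSettingsB):
    pass


os.environ["SUPABASE_URL"] = "https://example.supabase.co"
print(CombinedConfig().SUPABASE_URL)  # -> https://example.supabase.co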
24 changes: 24 additions & 0 deletions api/configs/middleware/storage/supabase_storage_config.py
@@ -0,0 +1,24 @@
from typing import Optional

from pydantic import BaseModel, Field


class SupabaseStorageConfig(BaseModel):
    """
    Configuration settings for Supabase Object Storage Service
    """

    SUPABASE_BUCKET_NAME: Optional[str] = Field(
        description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
        default=None,
    )

    SUPABASE_API_KEY: Optional[str] = Field(
        description="API key for authenticating with Supabase",
        default=None,
    )

    SUPABASE_URL: Optional[str] = Field(
        description="URL of the Supabase project",
        default=None,
    )
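
These three optional fields mirror the variables added to api/.env.example above. A rough sketch of how a storage backend might consume them, assuming the supabase-py client (the storage extension itself is not among the files shown here):

# Hedged sketch: object upload/download with supabase-py, driven by the three
# settings above. Values are placeholders; error handling is omitted.
from supabase import create_client

url = "https://example.supabase.co"  # SUPABASE_URL
api_key = "your-access-key"          # SUPABASE_API_KEY
bucket = "your-bucket-name"          # SUPABASE_BUCKET_NAME

client = create_client(url, api_key)
client.storage.from_(bucket).upload("uploads/hello.txt", b"hello")
data = client.storage.from_(bucket).download("uploads/hello.txt")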
24 changes: 24 additions & 0 deletions api/controllers/console/datasets/external.py
@@ -13,6 +13,7 @@
from services.dataset_service import DatasetService
from services.external_knowledge_service import ExternalDatasetService
from services.hit_testing_service import HitTestingService
from services.knowledge_service import ExternalDatasetTestService


def _validate_name(name):
@@ -232,8 +233,31 @@ def post(self, dataset_id):
raise InternalServerError(str(e))


class BedrockRetrievalApi(Resource):
    # this api is only for internal testing
    def post(self):
        parser = reqparse.RequestParser()
        parser.add_argument("retrieval_setting", nullable=False, required=True, type=dict, location="json")
        parser.add_argument(
            "query",
            nullable=False,
            required=True,
            type=str,
        )
        parser.add_argument("knowledge_id", nullable=False, required=True, type=str)
        args = parser.parse_args()

        # Call the knowledge retrieval service
        result = ExternalDatasetTestService.knowledge_retrieval(
            args["retrieval_setting"], args["query"], args["knowledge_id"]
        )
        return result, 200


api.add_resource(ExternalKnowledgeHitTestingApi, "/datasets/<uuid:dataset_id>/external-hit-testing")
api.add_resource(ExternalDatasetCreateApi, "/datasets/external")
api.add_resource(ExternalApiTemplateListApi, "/datasets/external-knowledge-api")
api.add_resource(ExternalApiTemplateApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>")
api.add_resource(ExternalApiUseCheckApi, "/datasets/external-knowledge-api/<uuid:external_knowledge_api_id>/use-check")
# this api is only for internal testing
api.add_resource(BedrockRetrievalApi, "/test/retrieval")
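
Grounded in the parser above, the test route expects a JSON body with retrieval_setting (an object), query, and knowledge_id. A hypothetical call against a local deployment; the host, the /console/api prefix, and the retrieval_setting keys are assumptions, not shown in this diff:

# Hypothetical request to the internal-test retrieval endpoint.
import requests

resp = requests.post(
    "http://localhost:5001/console/api/test/retrieval",  # prefix assumed
    json={
        "retrieval_setting": {"top_k": 2, "score_threshold": 0.5},  # keys assumed
        "query": "What is Dify?",
        "knowledge_id": "your-knowledge-base-id",
    },
)
print(resp.status_code, resp.json())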
7 changes: 3 additions & 4 deletions api/controllers/console/workspace/model_providers.py
@@ -126,13 +126,12 @@ class ModelProviderIconApi(Resource):
Get model provider icon
"""

@setup_required
@login_required
@account_initialization_required
def get(self, provider: str, icon_type: str, lang: str):
model_provider_service = ModelProviderService()
icon, mimetype = model_provider_service.get_model_provider_icon(
provider=provider, icon_type=icon_type, lang=lang
provider=provider,
icon_type=icon_type,
lang=lang,
)

return send_file(io.BytesIO(icon), mimetype=mimetype)
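
The deleted lines here include the setup_required, login_required, and account_initialization_required decorators, so after this change the icon endpoint can answer without a console session; the service call is also just reflowed to one keyword argument per line. A hypothetical anonymous fetch (the route pattern is an assumption; only the handler appears in this diff):

# Hypothetical: fetching a provider icon without authentication.
import requests

resp = requests.get(
    "http://localhost:5001/console/api/workspaces/current/model-providers/openai/icon_small/en_US"
)
print(resp.headers.get("Content-Type"))  # e.g. image/png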
3 changes: 3 additions & 0 deletions api/core/app/apps/advanced_chat/generate_task_pipeline.py
@@ -56,6 +56,7 @@
from models.model import Conversation, EndUser, Message
from models.workflow import (
Workflow,
WorkflowNodeExecution,
WorkflowRunStatus,
)

@@ -72,6 +73,7 @@ class AdvancedChatAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
_workflow: Workflow
_user: Union[Account, EndUser]
_workflow_system_variables: dict[SystemVariableKey, Any]
_wip_workflow_node_executions: dict[str, WorkflowNodeExecution]

def __init__(
self,
@@ -115,6 +117,7 @@ def __init__(
}

self._task_state = WorkflowTaskState()
self._wip_workflow_node_executions = {}

self._conversation_name_generate_thread = None

3 changes: 3 additions & 0 deletions api/core/app/apps/workflow/generate_task_pipeline.py
@@ -52,6 +52,7 @@
Workflow,
WorkflowAppLog,
WorkflowAppLogCreatedFrom,
WorkflowNodeExecution,
WorkflowRun,
WorkflowRunStatus,
)
@@ -69,6 +70,7 @@ class WorkflowAppGenerateTaskPipeline(BasedGenerateTaskPipeline, WorkflowCycleManage):
_task_state: WorkflowTaskState
_application_generate_entity: WorkflowAppGenerateEntity
_workflow_system_variables: dict[SystemVariableKey, Any]
_wip_workflow_node_executions: dict[str, WorkflowNodeExecution]

def __init__(
self,
@@ -103,6 +105,7 @@ def __init__(
}

self._task_state = WorkflowTaskState()
self._wip_workflow_node_executions = {}

def process(self) -> Union[WorkflowAppBlockingResponse, Generator[WorkflowAppStreamResponse, None, None]]:
"""
6 changes: 5 additions & 1 deletion api/core/app/task_pipeline/message_cycle_manage.py
@@ -1,8 +1,10 @@
import logging
from threading import Thread
from typing import Optional, Union

from flask import Flask, current_app

from configs import dify_config
from core.app.entities.app_invoke_entities import (
AdvancedChatAppGenerateEntity,
AgentChatAppGenerateEntity,
@@ -83,7 +85,9 @@ def _generate_conversation_name_worker(self, flask_app: Flask, conversation_id: str, query: str):
name = LLMGenerator.generate_conversation_name(app_model.tenant_id, query)
conversation.name = name
except Exception as e:
logging.exception(f"generate conversation name failed: {e}")
if dify_config.DEBUG:
logging.exception(f"generate conversation name failed: {e}")
pass

db.session.merge(conversation)
db.session.commit()
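
The net effect of this hunk: the traceback is logged only when dify_config.DEBUG is enabled, and the worker proceeds to merge and commit the conversation either way. A condensed sketch of the resulting flow:

# Sketch of the resulting behavior: naming failures are swallowed in
# production and logged with a traceback only in DEBUG mode.
try:
    conversation.name = LLMGenerator.generate_conversation_name(app_model.tenant_id, query)
except Exception as e:
    if dify_config.DEBUG:
        logging.exception(f"generate conversation name failed: {e}")

db.session.merge(conversation)
db.session.commit()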
72 changes: 48 additions & 24 deletions api/core/app/task_pipeline/workflow_cycle_manage.py
@@ -57,6 +57,7 @@ class WorkflowCycleManage:
_user: Union[Account, EndUser]
_task_state: WorkflowTaskState
_workflow_system_variables: dict[SystemVariableKey, Any]
_wip_workflow_node_executions: dict[str, WorkflowNodeExecution]

def _handle_workflow_run_start(self) -> WorkflowRun:
max_sequence = (
@@ -251,6 +252,8 @@ def _handle_node_execution_start(
db.session.refresh(workflow_node_execution)
db.session.close()

self._wip_workflow_node_executions[workflow_node_execution.node_execution_id] = workflow_node_execution

return workflow_node_execution

def _handle_workflow_node_execution_success(self, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution:
@@ -263,20 +266,36 @@ def _handle_workflow_node_execution_success(self, event: QueueNodeSucceededEvent) -> WorkflowNodeExecution:

inputs = WorkflowEntry.handle_special_values(event.inputs)
outputs = WorkflowEntry.handle_special_values(event.outputs)
execution_metadata = (
json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
)
finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
elapsed_time = (finished_at - event.start_at).total_seconds()

db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update(
{
WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.SUCCEEDED.value,
WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None,
WorkflowNodeExecution.process_data: json.dumps(event.process_data) if event.process_data else None,
WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None,
WorkflowNodeExecution.execution_metadata: execution_metadata,
WorkflowNodeExecution.finished_at: finished_at,
WorkflowNodeExecution.elapsed_time: elapsed_time,
}
)

db.session.commit()
db.session.close()

workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value
workflow_node_execution.inputs = json.dumps(inputs) if inputs else None
workflow_node_execution.process_data = json.dumps(event.process_data) if event.process_data else None
workflow_node_execution.outputs = json.dumps(outputs) if outputs else None
workflow_node_execution.execution_metadata = (
json.dumps(jsonable_encoder(event.execution_metadata)) if event.execution_metadata else None
)
workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
workflow_node_execution.elapsed_time = (workflow_node_execution.finished_at - event.start_at).total_seconds()
workflow_node_execution.execution_metadata = execution_metadata
workflow_node_execution.finished_at = finished_at
workflow_node_execution.elapsed_time = elapsed_time

db.session.commit()
db.session.refresh(workflow_node_execution)
db.session.close()
self._wip_workflow_node_executions.pop(workflow_node_execution.node_execution_id)

return workflow_node_execution
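
This hunk and the failure hunk below share one pattern: compute finished_at and elapsed_time once, persist every field with a bulk query(...).update({...}) keyed on the row id (replacing per-attribute mutation followed by db.session.refresh()), then mirror the same values onto the in-memory object so later pipeline steps can read them after the session is closed. A stripped-down sketch of that pattern, with the field list shortened for illustration:

# Reduced sketch of the update pattern introduced in this commit.
finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
elapsed_time = (finished_at - event.start_at).total_seconds()

# one UPDATE statement instead of dirty-attribute tracking plus a refresh
db.session.query(WorkflowNodeExecution).filter(
    WorkflowNodeExecution.id == workflow_node_execution.id
).update(
    {
        WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.SUCCEEDED.value,
        WorkflowNodeExecution.finished_at: finished_at,
        WorkflowNodeExecution.elapsed_time: elapsed_time,
    }
)
db.session.commit()
db.session.close()

# keep the detached Python object consistent without another SELECT
workflow_node_execution.status = WorkflowNodeExecutionStatus.SUCCEEDED.value
workflow_node_execution.finished_at = finished_at
workflow_node_execution.elapsed_time = elapsed_time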

@@ -290,18 +309,33 @@ def _handle_workflow_node_execution_failed(self, event: QueueNodeFailedEvent) -> WorkflowNodeExecution:

inputs = WorkflowEntry.handle_special_values(event.inputs)
outputs = WorkflowEntry.handle_special_values(event.outputs)
finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
elapsed_time = (finished_at - event.start_at).total_seconds()

db.session.query(WorkflowNodeExecution).filter(WorkflowNodeExecution.id == workflow_node_execution.id).update(
{
WorkflowNodeExecution.status: WorkflowNodeExecutionStatus.FAILED.value,
WorkflowNodeExecution.error: event.error,
WorkflowNodeExecution.inputs: json.dumps(inputs) if inputs else None,
WorkflowNodeExecution.process_data: json.dumps(event.process_data) if event.process_data else None,
WorkflowNodeExecution.outputs: json.dumps(outputs) if outputs else None,
WorkflowNodeExecution.finished_at: finished_at,
WorkflowNodeExecution.elapsed_time: elapsed_time,
}
)

db.session.commit()
db.session.close()

workflow_node_execution.status = WorkflowNodeExecutionStatus.FAILED.value
workflow_node_execution.error = event.error
workflow_node_execution.finished_at = datetime.now(timezone.utc).replace(tzinfo=None)
workflow_node_execution.inputs = json.dumps(inputs) if inputs else None
workflow_node_execution.process_data = json.dumps(event.process_data) if event.process_data else None
workflow_node_execution.outputs = json.dumps(outputs) if outputs else None
workflow_node_execution.elapsed_time = (workflow_node_execution.finished_at - event.start_at).total_seconds()
workflow_node_execution.finished_at = finished_at
workflow_node_execution.elapsed_time = elapsed_time

db.session.commit()
db.session.refresh(workflow_node_execution)
db.session.close()
self._wip_workflow_node_executions.pop(workflow_node_execution.node_execution_id)

return workflow_node_execution

@@ -678,17 +712,7 @@ def _refetch_workflow_node_execution(self, node_execution_id: str) -> WorkflowNodeExecution:
:param node_execution_id: workflow node execution id
:return:
"""
workflow_node_execution = (
db.session.query(WorkflowNodeExecution)
.filter(
WorkflowNodeExecution.tenant_id == self._application_generate_entity.app_config.tenant_id,
WorkflowNodeExecution.app_id == self._application_generate_entity.app_config.app_id,
WorkflowNodeExecution.workflow_id == self._workflow.id,
WorkflowNodeExecution.triggered_from == WorkflowNodeExecutionTriggeredFrom.WORKFLOW_RUN.value,
WorkflowNodeExecution.node_execution_id == node_execution_id,
)
.first()
)
workflow_node_execution = self._wip_workflow_node_executions.get(node_execution_id)

if not workflow_node_execution:
raise Exception(f"Workflow node execution not found: {node_execution_id}")
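
Taken together, the _wip_workflow_node_executions dict added across this file acts as an in-memory registry of running nodes: _handle_node_execution_start stores each record under its node_execution_id, the success and failure handlers pop the entry once the node settles, and _refetch_workflow_node_execution now resolves from the dict instead of re-running the old five-filter database query on every event. A reduced sketch of the lifecycle:

# Reduced sketch of the work-in-progress cache used by this class.
class WipCacheSketch:
    def __init__(self) -> None:
        self._wip_workflow_node_executions: dict[str, WorkflowNodeExecution] = {}

    def on_start(self, execution: WorkflowNodeExecution) -> None:
        # register the row created when the node begins running
        self._wip_workflow_node_executions[execution.node_execution_id] = execution

    def refetch(self, node_execution_id: str) -> WorkflowNodeExecution:
        # O(1) lookup replaces the old five-filter database query
        execution = self._wip_workflow_node_executions.get(node_execution_id)
        if not execution:
            raise Exception(f"Workflow node execution not found: {node_execution_id}")
        return execution

    def on_finish(self, execution: WorkflowNodeExecution) -> None:
        # drop the entry once the node succeeds or fails
        self._wip_workflow_node_executions.pop(execution.node_execution_id)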
(The remaining changed files in this commit are not shown.)
