Skip to content

Commit

Permalink
Fix compatibility issue with older memory provider
Browse files Browse the repository at this point in the history
  • Loading branch information
vmesel committed Oct 30, 2024
1 parent 562a214 commit c24e6d5
Show file tree
Hide file tree
Showing 5 changed files with 67 additions and 52 deletions.
1 change: 0 additions & 1 deletion docker-compose-open-webui.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
version: '3.3'
services:
db:
image: pgvector/pgvector:pg15
Expand Down
1 change: 0 additions & 1 deletion docker-compose.dev-container.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
version: "3.3"
services:
db:
image: pgvector/pgvector:pg15
Expand Down
72 changes: 37 additions & 35 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,15 @@ uvicorn = { extras = ["standard"], version = "0.24.0.post1" }
psycopg2 = "2.9.9"
python-decouple = "3.8"
pydantic = "2.7.4"
psycopg = "3.1.12"
psycopg = "3.2.2"
tiktoken = "<1"
pandas = "2.1.3"
scikit-learn = "1.5.0"
alembic = "1.12.1"
importlib-metadata = "7.0.1"
pyarrow = "15.0.0"
iniconfig = "2.0.0"
dialog-lib = "0.0.2.7"
dialog-lib = "0.0.3.0"
langserve = "0.2.0"
sse-starlette = "2.1.0"

Expand Down
41 changes: 28 additions & 13 deletions src/dialog/routers/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,19 +26,34 @@ async def get_models():
Returns the model that is available inside Dialog in the OpenAI format.
"""

return [OpenAIModel(**{
"id": "talkd-ai",
"object": "model",
"created": int(datetime.datetime.now().timestamp()),
"owned_by": "system"
})] + [
OpenAIModel(**{
"id": model["model_name"],
"object": "model",
"created": int(datetime.datetime.now().timestamp()),
"owned_by": "system"
}) for model in Settings().PROJECT_CONFIG.get("endpoint", [])
]
return {
"models": [
OpenAIModel(**{
"id": "talkd-ai",
"object": "model",
"created": int(datetime.datetime.now().timestamp()),
"owned_by": "system",
"digest": str(uuid4())
})
] + [
OpenAIModel(**{
"id": model["model_name"],
"object": "model",
"created": int(datetime.datetime.now().timestamp()),
"owned_by": "system",
"digest": str(uuid4())
}) for model in Settings().PROJECT_CONFIG.get("endpoint", [])
]
}

@open_ai_api_router.get("/api/tags")
async def get_tags():
    """
    Return the available models, delegating to ``get_models``.

    Thin alias endpoint: ``/api/tags`` simply forwards to the ``/models``
    handler and returns its result unchanged, so both routes always expose
    the same model list.
    NOTE(review): ``/api/tags`` is the Ollama-style listing route — this
    assumes clients of that route accept the OpenAI ``/models`` response
    shape; confirm against the consumers' expected schema.
    """

    # Delegate so the two endpoints cannot drift out of sync.
    return await get_models()


@open_ai_api_router.post("/chat/completions")
async def ask_question_to_llm(message: OpenAIChat, session: Session = Depends(get_session)):
Expand Down

0 comments on commit c24e6d5

Please sign in to comment.