Skip to content

Commit

Permalink
Merge branch 'main' into feature/bump-dialog-lib
Browse files Browse the repository at this point in the history
  • Loading branch information
vmesel authored Jul 14, 2024
2 parents 6f2666c + 2240ada commit f4fad40
Show file tree
Hide file tree
Showing 7 changed files with 72 additions and 19 deletions.
18 changes: 18 additions & 0 deletions docker-compose.dev-container.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,3 +39,21 @@ services:
- PGPASSWORD=talkdai
env_file:
- .env
openwebui:
image: ghcr.io/open-webui/open-webui:main
ports:
- '3000:8080'
environment:
- OPENAI_API_KEYS=FAKE-KEY;
- OPENAI_API_BASE_URLS=http://dialog:8000/openai;
- ENABLE_OPENAI_API=true
volumes:
- open-webui:/app/backend/data
depends_on:
db:
condition: service_healthy
dialog:
condition: service_started

volumes:
open-webui:
4 changes: 3 additions & 1 deletion docs/quick-start.md
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ See [our documentation](https://dialog.talkd.ai/settings#csv-knowledge-base) for

#### `.toml` prompt configuration

The `[prompt.header]`, `[prompt.suggested]`, and `[fallback.prompt]` fields are mandatory fields used for processing the conversation and connecting to the LLM.
The `[prompt.header]`, `[prompt.history_header]`, `[prompt.suggested]`, and `[fallback.prompt]` fields are mandatory fields used for processing the conversation and connecting to the LLM.

The `[prompt.fallback]` field is used when the LLM does not find a compatible embedding in the database; that is, the `[prompt.header]` **is ignored** and the `[prompt.fallback]` is used. Without it, there could be hallucinations about possible answers to questions outside the scope of the embeddings.

Expand All @@ -117,6 +117,8 @@ qualified service to high-end customers. Be brief in your answers, without being
and objective in your responses. Never say that you are a model (AI), always answer as Avelino.
Be polite and friendly!"""
history_header = """This is the history of the conversation with the user:"""
suggested = "Here is some possible content
that could help the user in a better way."
Expand Down
2 changes: 1 addition & 1 deletion docs/settings.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ Here is a brief explanation of the environment variables:
- `DIALOG_LOADCSV_CLEARDB`: if set to `true`, the script `load_csv.py` will delete all previously imported vectors and reimport everything again.
- `COSINE_SIMILARITY_THRESHOLD`: the cosine similarity threshold used to filter the results from the database's similarity query. The default is `0.5`.
- `PLUGINS`: the path to the plugins that will be loaded into the application comma-separated. An example is: `dialog-whatsapp,plugins.my-custom-plugin`.

- `OPENWEB_UI_SESSION`: the session ID used to connect to the OpenWeb UI API. By default, it is `dialog-openweb-ui` and it can be changed.

## CSV format

Expand Down
4 changes: 4 additions & 0 deletions sample_data/prompt.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,10 @@ suggested = """
Here are some contents that you need to use in order to help you with the user's question:
"""

history_header = """
This is the history of the conversation:
"""

[fallback]
prompt = """
Answer the following message to the user.
Expand Down
32 changes: 22 additions & 10 deletions src/dialog/llm/agents/default.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
from dialog.settings import Settings
from dialog.db import get_session


class DialogLLM(AbstractRAG):
def __init__(self, *args, **kwargs):
kwargs["dbsession"] = next(get_session())
Expand Down Expand Up @@ -47,24 +48,35 @@ def generate_prompt(self, text):
self.config.get("fallback").get("prompt") # maintaining compatibility with the previous configuration
header = prompt_config.get("header")
suggested = prompt_config.get("suggested")
history_header = prompt_config.get("history_header")
messages = []

messages.append(SystemMessagePromptTemplate.from_template(header))
if len(self.relevant_contents) > 0:
context = "Context: \n".join(
[f"{c.question}\n{c.content}\n" for c in self.relevant_contents]
)
messages.append(SystemMessagePromptTemplate.from_template(header))
messages.append(SystemMessagePromptTemplate.from_template(
f"{suggested}. {context}"))
messages.append(
MessagesPlaceholder(
variable_name="chat_history", optional=True))
messages.append(
HumanMessagePromptTemplate.from_template("{user_message}"))
SystemMessagePromptTemplate.from_template(f"{suggested}. {context}")
)
else:
messages.append(
SystemMessagePromptTemplate.from_template(fallback))
messages.append(
HumanMessagePromptTemplate.from_template("{user_message}"))
SystemMessagePromptTemplate.from_template(fallback)
)

messages.append(
SystemMessagePromptTemplate.from_template(history_header)
)

messages.append(
MessagesPlaceholder(
variable_name="chat_history"
)
)

messages.append(
HumanMessagePromptTemplate.from_template("{user_message}")
)
self.prompt = ChatPromptTemplate.from_messages(messages)

if Settings().VERBOSE_LLM:
Expand Down
22 changes: 16 additions & 6 deletions src/dialog/routers/openai.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
# *-* coding: utf-8 *-*
from uuid import uuid4
import datetime
import os
import logging
import datetime
from uuid import uuid4

from dialog.db import engine, get_session
from dialog_lib.db.models import Chat as ChatEntity, ChatMessages
Expand Down Expand Up @@ -44,11 +45,20 @@ async def ask_question_to_llm(message: OpenAIChat, session: Session = Depends(ge
"""
This posts a message to the LLM and returns the response in the OpenAI format.
"""

start_time = datetime.datetime.now()
new_chat = ChatEntity(
session_id = f"openai-{str(uuid4())}",
)
session.add(new_chat)
chat_entity = session.query(ChatEntity).filter(ChatEntity.session_id == Settings().OPENWEB_UI_SESSION).first()

if not chat_entity:
logging.info("Creating new chat entity")
new_chat = ChatEntity(
session_id = Settings().OPENWEB_UI_SESSION,
)
session.add(new_chat)
session.flush()
else:
logging.info("Using old chat entity")
new_chat = chat_entity

non_empty_messages = []

Expand Down
9 changes: 8 additions & 1 deletion src/dialog/settings.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,15 @@
import tomllib
import logging
from uuid import uuid4
from pathlib import Path
from decouple import Csv, Config

config = Config(".env")

logger = logging.getLogger(__name__)

openweb_ui_session_id_fallback = uuid4()

class Settings:

@property
Expand Down Expand Up @@ -113,4 +116,8 @@ def CORS_ALLOW_METHODS(self):

@property
def CORS_ALLOW_HEADERS(self):
return config.get("CORS_ALLOW_HEADERS", cast=Csv(), default="*")
return config.get("CORS_ALLOW_HEADERS", cast=Csv(), default="*")

@property
def OPENWEB_UI_SESSION(self):
    """Session ID used for the OpenWeb UI chat entity.

    Read from the OPENWEB_UI_SESSION environment variable; falls back to
    the plain string "dialog-openweb-ui" when unset.
    """
    # NOTE: the original used f"dialog-openweb-ui" — an f-string with no
    # placeholders (stray `f` prefix); a plain literal is equivalent.
    return config.get("OPENWEB_UI_SESSION", "dialog-openweb-ui")

0 comments on commit f4fad40

Please sign in to comment.