Skip to content

Commit

Permalink
fix a bug with anthropic client and update the default system prompt …
Browse files Browse the repository at this point in the history
…to always have a sys message
  • Loading branch information
liyin2015 committed Jul 8, 2024
1 parent a933ceb commit e5a96a9
Show file tree
Hide file tree
Showing 5 changed files with 25 additions and 13 deletions.
6 changes: 6 additions & 0 deletions lightrag/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,9 @@
## [0.0.0-alpha.16] - 2024-07-08

### Fixed
- Anthropic client messages do not support a system role. For now, we send the whole prompt as the first user message.
- Update the `DEFAULT_LIGHTRAG_SYSTEM_PROMPT` to include 'You are a helpful assistant' as the default <SYS> prompt.

## [0.0.0-alpha.15] - 2024-07-07

### Fixed
Expand Down
17 changes: 12 additions & 5 deletions lightrag/lightrag/components/model_client/anthropic_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,19 +72,26 @@ def parse_chat_completion(self, completion: Message) -> str:
log.debug(f"completion: {completion}")
return completion.content[0].text

# TODO: potentially use <SYS></SYS> to separate the system and user messages. This requires user to follow it. If it is not found, then we will only use user message.
def convert_inputs_to_api_kwargs(
    self,
    input: Optional[Any] = None,
    model_kwargs: Optional[Dict] = None,
    model_type: ModelType = ModelType.UNDEFINED,
) -> dict:
    r"""Build the keyword arguments for the Anthropic messages API call.

    The Anthropic API separates the system prompt from the user messages.
    Because this client renders the whole prompt as a single string, the
    entire prompt is sent as the first (and only) user message.
    api: https://docs.anthropic.com/en/api/messages

    Args:
        input: The fully rendered prompt string to send as the user message.
        model_kwargs: Extra model parameters (e.g. model name, max_tokens);
            copied so the caller's dict is never mutated. Defaults to None
            (treated as empty) to avoid a shared mutable default argument.
        model_type: Must be ``ModelType.LLM``; any other value is rejected.

    Returns:
        dict: ``model_kwargs`` merged with a ``messages`` list ready for the
        Anthropic messages endpoint.

    Raises:
        ValueError: If ``model_type`` is not ``ModelType.LLM``.
    """
    # Copy so we never mutate the caller's dict (and never share state
    # across calls, which a `{}` default argument would have done).
    api_kwargs = dict(model_kwargs) if model_kwargs else {}
    if model_type == ModelType.LLM:
        # TODO: potentially use <SYS></SYS> markers to split the prompt into
        # Anthropic's separate "system" field and user messages; for now the
        # whole prompt goes in as one user message.
        api_kwargs["messages"] = [
            {"role": "user", "content": input},
        ]
    else:
        raise ValueError(f"Model type {model_type} not supported")
    return api_kwargs
Expand Down
8 changes: 3 additions & 5 deletions lightrag/lightrag/core/default_prompt_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,12 @@
User: {{input_str}}
You:"""

DEFAULT_LIGHTRAG_SYSTEM_PROMPT = r"""{% if task_desc_str or output_format_str or tools_str or examples_str or chat_history_str or context_str or steps_str %}
<SYS>
{% endif %}
DEFAULT_LIGHTRAG_SYSTEM_PROMPT = r"""<SYS>
{# task desc #}
{% if task_desc_str %}
{{task_desc_str}}
{% else %}
You are a helpful assistant.
{% endif %}
{# output format #}
{% if output_format_str %}
Expand Down Expand Up @@ -68,9 +68,7 @@
{{steps_str}}
</STEPS>
{% endif %}
{% if task_desc_str or output_format_str or tools_str or examples_str or chat_history_str or context_str or steps_str %}
</SYS>
{% endif %}
{% if input_str %}
<User>
{{input_str}}
Expand Down
5 changes: 3 additions & 2 deletions lightrag/lightrag/core/generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -232,10 +232,11 @@ def call(
}
prompt_kwargs.update(trained_prompt_kwargs)

log.info(f"prompt_kwargs: {prompt_kwargs}")
log.info(f"model_kwargs: {model_kwargs}")
log.debug(f"prompt_kwargs: {prompt_kwargs}")
log.debug(f"model_kwargs: {model_kwargs}")

api_kwargs = self._pre_call(prompt_kwargs, model_kwargs)
log.debug(f"api_kwargs: {api_kwargs}")
output: GeneratorOutputType = None
# call the model client
try:
Expand Down
2 changes: 1 addition & 1 deletion lightrag/pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
[tool.poetry]
name = "lightrag"

version = "0.0.0-alpha.15"
version = "0.0.0-alpha.16"
description = "The 'PyTorch' library for LLM applications. RAG=Retriever-Agent-Generator."
authors = ["Li Yin <[email protected]>"]
readme = "README.md"
Expand Down

0 comments on commit e5a96a9

Please sign in to comment.