Skip to content

Commit

Permalink
✨ feat: Support custom help message
Browse files Browse the repository at this point in the history
  • Loading branch information
hibobmaster committed May 8, 2024
1 parent eead156 commit e32b96c
Show file tree
Hide file tree
Showing 5 changed files with 49 additions and 10 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
# Changelog

## 1.8.0
- Support custom help message

## 1.7.2
- Refactor gpt vision trigger method
- !pic, !help, and gpt vision in thread chat
Expand Down
1 change: 1 addition & 0 deletions compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ services:
- ./sync_db:/app/sync_db
- ./context.db:/app/context.db
# - ./manage_db:/app/manage_db
# - ./custom_help_message.txt:/app/custom_help_message.txt
# import_keys path
# - ./element-keys.txt:/app/element-keys.txt
networks:
Expand Down
11 changes: 11 additions & 0 deletions custom_help_message.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
Hi there, welcome to our chat room!
Our bot is powered by the open-source project [matrix_chatgpt_bot](https://github.com/hibobmaster/matrix_chatgpt_bot).
Here are some commands you can use to interact with the bot.
!gpt [prompt], generate a one time response without context conversation
!chat [prompt], chat with context conversation
!pic [prompt], Image generation by DALL-E-3 or LocalAI or stable-diffusion-webui
!new + chat, start a new conversation
!lc [prompt], chat using langchain api
quote an image and @bot with prompt, gpt vision function
@bot with prompt, create a thread-level chat
!help, help message
26 changes: 16 additions & 10 deletions src/bot.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,6 +80,7 @@ def __init__(
gpt_vision_model: Optional[str] = None,
gpt_vision_api_endpoint: Optional[str] = None,
timeout: Union[float, None] = None,
custom_help_message: Optional[str] = None,
):
if homeserver is None or user_id is None or device_id is None:
logger.error("homeserver && user_id && device_id is required")
Expand Down Expand Up @@ -227,6 +228,8 @@ def __init__(
self.help_prog = re.compile(r"\s*!help\s*.*$")
self.new_prog = re.compile(r"\s*!new\s+(.+)$")

self.custom_help_message = custom_help_message

async def close(self, task: asyncio.Task) -> None:
self.chatbot.cursor.close()
self.chatbot.conn.close()
Expand Down Expand Up @@ -1818,16 +1821,19 @@ async def help(
reply_in_thread=False,
thread_root_id=None,
):
help_info = (
"!gpt [prompt], generate a one time response without context conversation\n"
+ "!chat [prompt], chat with context conversation\n"
+ "!pic [prompt], Image generation by DALL-E-3 or LocalAI or stable-diffusion-webui\n" # noqa: E501
+ "!new + chat, start a new conversation \n"
+ "!lc [prompt], chat using langchain api\n"
+ "quote a image and @bot with prompt, gpt vision function\n"
+ "@bot with prompt, create a thread level chatting\n"
+ "!help, help message"
) # noqa: E501
if self.custom_help_message:
help_info = self.custom_help_message
else:
help_info = (
"!gpt [prompt], generate a one time response without context conversation\n"
+ "!chat [prompt], chat with context conversation\n"
+ "!pic [prompt], Image generation by DALL-E-3 or LocalAI or stable-diffusion-webui\n" # noqa: E501
+ "!new + chat, start a new conversation \n"
+ "!lc [prompt], chat using langchain api\n"
+ "quote a image and @bot with prompt, gpt vision function\n"
+ "@bot with prompt, create a thread level chatting\n"
+ "!help, help message"
) # noqa: E501

await send_room_message(
self.client,
Expand Down
18 changes: 18 additions & 0 deletions src/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,22 @@
async def main():
need_import_keys = False
config_path = Path(os.path.dirname(__file__)).parent / "config.json"
help_message_path = (
Path(os.path.dirname(__file__)).parent / "custom_help_message.txt"
)

if os.path.isfile(help_message_path):
try:
f = open(help_message_path, encoding="utf8")
custom_help_message = ""
for line in f.readlines():
custom_help_message += line
except Exception as e:
logger.error(e)
sys.exit(1)
else:
custom_help_message = None

if os.path.isfile(config_path):
try:
fp = open(config_path, encoding="utf8")
Expand Down Expand Up @@ -52,6 +68,7 @@ async def main():
gpt_vision_model=config.get("gpt_vision_model"),
gpt_vision_api_endpoint=config.get("gpt_vision_api_endpoint"),
timeout=config.get("timeout"),
custom_help_message=custom_help_message,
)
if (
config.get("import_keys_path")
Expand Down Expand Up @@ -90,6 +107,7 @@ async def main():
gpt_vision_model=os.environ.get("GPT_VISION_MODEL"),
gpt_vision_api_endpoint=os.environ.get("GPT_VISION_API_ENDPOINT"),
timeout=float(os.environ.get("TIMEOUT", 120.0)),
custom_help_message=custom_help_message,
)
if (
os.environ.get("IMPORT_KEYS_PATH")
Expand Down

0 comments on commit e32b96c

Please sign in to comment.