From 88a911e3422ceaae7c808f2912324a226b62f3d3 Mon Sep 17 00:00:00 2001
From: "Mir Muhammad Abidul Haq (Ahnaf)" <76505613+Ahnaf-nub@users.noreply.github.com>
Date: Mon, 5 Aug 2024 06:11:54 +0600
Subject: [PATCH 1/4] Create README.md

---
 python/community/llama-bot/README.md | 1 +
 1 file changed, 1 insertion(+)
 create mode 100644 python/community/llama-bot/README.md

diff --git a/python/community/llama-bot/README.md b/python/community/llama-bot/README.md
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/python/community/llama-bot/README.md
@@ -0,0 +1 @@
+

From e97a7737d482870f7388f236b663d8cac094afcb Mon Sep 17 00:00:00 2001
From: "Mir Muhammad Abidul Haq (Ahnaf)" <76505613+Ahnaf-nub@users.noreply.github.com>
Date: Mon, 5 Aug 2024 06:13:18 +0600
Subject: [PATCH 2/4] Create requirements.txt

---
 python/community/llama-bot/requirements.txt | 2 ++
 1 file changed, 2 insertions(+)
 create mode 100644 python/community/llama-bot/requirements.txt

diff --git a/python/community/llama-bot/requirements.txt b/python/community/llama-bot/requirements.txt
new file mode 100644
index 00000000..1919a6ac
--- /dev/null
+++ b/python/community/llama-bot/requirements.txt
@@ -0,0 +1,2 @@
+flet
+groq

From abe2444684175fbf06c2b214ea243fab67d45644 Mon Sep 17 00:00:00 2001
From: "Mir Muhammad Abidul Haq (Ahnaf)" <76505613+Ahnaf-nub@users.noreply.github.com>
Date: Mon, 5 Aug 2024 06:17:40 +0600
Subject: [PATCH 3/4] Update README.md

---
 python/community/llama-bot/README.md | 42 ++++++++++++++++++++++++++++
 1 file changed, 42 insertions(+)

diff --git a/python/community/llama-bot/README.md b/python/community/llama-bot/README.md
index 8b137891..1f29a89e 100644
--- a/python/community/llama-bot/README.md
+++ b/python/community/llama-bot/README.md
@@ -1 +1,43 @@
+# Llama 3.1 Chatbot with Flet
+A simple chatbot application built with the Flet framework and powered by the Llama 3.1 model from Groq. The app provides a user interface for sending messages to the chatbot and receiving responses.
+## Installation
+1. Clone the repository:
+
+   ```bash
+   git clone https://github.com/yourusername/llama-chatbot.git
+   cd llama-chatbot
+   ```
+
+2. Install the required Python packages:
+
+   ```bash
+   pip install flet groq
+   ```
+
+3. Set up your Groq API key. Follow the instructions below to obtain one.
+
+## Getting the API Key from Groq
+
+1. Visit the [Groq website](https://groq.com) and sign up for an account.
+2. Navigate to the API section of your account settings.
+3. Generate a new API key and copy it.
+
+## Usage
+
+1. Open the `app.py` file in a text editor.
+2. Replace the placeholder API key with your Groq API key:
+
+   ```python
+   client = Groq(
+       api_key='your_groq_api_key_here',
+   )
+   ```
+
+3. Save the file and run the application:
+
+   ```bash
+   python app.py
+   ```
+
+4. The application will open a window with the chat interface. Type your message and press "Send" to interact with the chatbot.
 
From c28296544428c20c37ef2d26dec9c2153d5ee8b8 Mon Sep 17 00:00:00 2001
From: "Mir Muhammad Abidul Haq (Ahnaf)" <76505613+Ahnaf-nub@users.noreply.github.com>
Date: Mon, 5 Aug 2024 06:19:25 +0600
Subject: [PATCH 4/4] Create app.py

---
 python/community/llama-bot/app.py | 59 +++++++++++++++++++++++++++++++
 1 file changed, 59 insertions(+)
 create mode 100644 python/community/llama-bot/app.py

diff --git a/python/community/llama-bot/app.py b/python/community/llama-bot/app.py
new file mode 100644
index 00000000..faa30627
--- /dev/null
+++ b/python/community/llama-bot/app.py
@@ -0,0 +1,59 @@
+from groq import Groq
+import flet as ft
+
+# Initialize the Groq client with your API key
+client = Groq(
+    api_key='your_groq_api_key_here',
+)
+
+class Message:
+    def __init__(self, user: str, text: str, response_text: str):
+        self.user = user
+        self.text = text
+        self.response_text = response_text
+
+def main(page: ft.Page):
+    chat = ft.ListView(expand=True, spacing=10, padding=10, auto_scroll=True)
+    new_message = ft.TextField(expand=True, hint_text="Type your message here...")
+
+    def on_message(message: Message):
+        chat.controls.append(ft.Text(f"User: {message.text}"))
+        chat.controls.append(ft.Text(f"Bot: {message.response_text}"))
+        page.update()
+
+    page.pubsub.subscribe(on_message)
+
+    def send_click(e):
+        user_message = new_message.value
+        if user_message:
+            new_message.value = ""
+            processing_text = ft.Text("Processing answer...", color="blue")
+            chat.controls.append(processing_text)
+            page.update()
+            chat_completion = client.chat.completions.create(
+                messages=[
+                    {
+                        "role": "user",
+                        "content": user_message,
+                    }
+                ],
+                model="llama-3.1-70b-versatile",
+            )
+            response_text = chat_completion.choices[0].message.content
+            message = Message(user=page.session_id, text=user_message, response_text=response_text)
+            page.pubsub.send_all(message)
+            chat.controls.remove(processing_text)  # drop the placeholder once the reply is in
+            page.update()
+
+    page.add(
+        ft.Container(
+            content=ft.Column([
+                chat,
+                ft.Row([new_message, ft.ElevatedButton("Send", on_click=send_click)])
+            ]),
+            expand=True,
+            padding=10
+        )
+    )
+
+ft.app(target=main)
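A possible follow-up to these patches: instead of editing the hard-coded placeholder in `app.py`, the Groq key could be read from an environment variable. Below is a minimal sketch of that variant, assuming the key is exported as `GROQ_API_KEY`; the variable name and the error message are illustrative and not part of the patches.

```python
import os

from groq import Groq

# Sketch only: load the API key from the environment instead of hard-coding it.
# Assumes the key was exported beforehand, e.g. `export GROQ_API_KEY=...`.
api_key = os.environ.get("GROQ_API_KEY")
if not api_key:
    raise RuntimeError("GROQ_API_KEY is not set; export it before running app.py")

client = Groq(api_key=api_key)
```

With this variant, step 2 of the README's Usage section would become exporting the variable rather than editing the source file.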