Skip to content

Commit

Permalink
Refactor LLM class to import Client from ollama and add chat functionality
Browse files Browse the repository at this point in the history
  • Loading branch information
Sedrowow committed Sep 16, 2024
1 parent a8efb01 commit a194851
Showing 1 changed file with 14 additions and 3 deletions.
17 changes: 14 additions & 3 deletions app/llm.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
# app/llm.py
from pathlib import Path
from typing import Any
import ollama_python as ollama
import ollama
from ollama import Client

from models.factory import ModelFactory
from utils import local_info
Expand All @@ -19,7 +20,17 @@ def __init__(self):
context = self.read_context_txt_file()

self.model = ModelFactory.create_model(self.model_name, base_url, context)

client = Client(host='http://localhost:8000')
response = client.chat(model='llama3.1', messages=[
{
'role': 'user',
'content': 'Why is the sky blue?',
},
{
'role': 'assistant',
'content': 'The sky is blue because of Rayleigh scattering.'
}
])
def get_settings_values(self) -> tuple[str, str]:
model_name = self.settings_dict.get('model')
if not model_name:
Expand All @@ -38,7 +49,7 @@ def switch_model(self, model_name: str):
self.model = ModelFactory.create_model(self.model_name, context)

def download_model(self, model_name: str) -> None:
    """Download *model_name* via the underlying model backend, if it supports it.

    NOTE(review): the committed diff guards with
    ``isinstance(self.model, ollama.list)`` — but ``ollama.list`` is a
    *function* in the ollama client library, not a class, so ``isinstance``
    raises ``TypeError`` on every call. The pre-diff guard referenced
    ``ollama.OllamaModel`` from the removed ``ollama_python`` package, which
    is no longer imported. A capability check preserves the original intent
    (delegate only when the backend can download) without depending on
    either package's class hierarchy.

    Args:
        model_name: Name of the model to download (e.g. ``"llama3.1"``).
    """
    # Duck-typed guard: delegate only when the backend exposes download_model.
    if hasattr(self.model, "download_model"):
        self.model.download_model(model_name)

def read_context_txt_file(self) -> str:
Expand Down

0 comments on commit a194851

Please sign in to comment.