From cb4b77d7b37eb893351e8f613ed75ec56b715ab4 Mon Sep 17 00:00:00 2001
From: Sethupathi Asokan
Date: Wed, 9 Oct 2024 08:26:25 +0530
Subject: [PATCH] Update setup instructions

---
 service/README.md | 46 +++++++++++++++++++++++++++++++++++++++-------
 1 file changed, 39 insertions(+), 7 deletions(-)

diff --git a/service/README.md b/service/README.md
index 3fdc012..faa0da0 100644
--- a/service/README.md
+++ b/service/README.md
@@ -1,14 +1,46 @@
-# Installations
+# Setup Instructions
+**Prerequisites: The following instructions are for Linux and require Python 3.8.1 or above.**
 
-pip install uvicorn
+    sudo apt-get update
+    sudo apt-get install python3.8
 
-# Start Server
+FFmpeg
 
-source venv/bin/activate
+    sudo apt update && sudo apt install ffmpeg
 
-cd service
+Ollama
 
-uvicorn main:app --host localhost --port 8000 --reload
+For Linux:
+
+    curl -fsSL https://ollama.com/install.sh | sh
+
+For Mac:
+
+    https://ollama.com/download/Ollama-darwin.zip
+
+For Windows:
+
+    https://ollama.com/download/OllamaSetup.exe
+
+Llama 3.1 model
+
+    ollama run llama3.1
+
+Setup: Clone this GitHub repository with git clone
+
+Create a Python virtual environment
+
+    python3 -m venv lingo
+
+Activate the virtual environment
+
+    source lingo/bin/activate
+
+Install dependencies and start the server
+
+    pip install -r requirements.txt
+
+    uvicorn main:app --host localhost --port 8000 --reload
 
 # Api endpoints
 
@@ -39,4 +71,4 @@ errorStatusCode: 500
 
 # exit virtual env
 
-deactivate
\ No newline at end of file
+deactivate
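
A minimal sanity check for anyone following these steps locally: once the uvicorn command is running, hit the server from a second terminal. The host and port below come from the command in the patch; the /docs path is an assumption (it is FastAPI's default interactive docs route, and the patch does not state which framework main:app uses).

    # Run in a second terminal while uvicorn is serving.
    # localhost:8000 matches the uvicorn command above.
    curl -i http://localhost:8000/

    # If the service is a FastAPI app (an assumption, not stated in the patch),
    # its auto-generated docs should be reachable here.
    curl -i http://localhost:8000/docs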