diff --git a/.gitignore b/.gitignore
index d953537..7769527 100644
--- a/.gitignore
+++ b/.gitignore
@@ -315,5 +315,4 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
-
-/cache-directory/
+.env
diff --git a/README.md b/README.md
index 1f44ecd..8f71ca5 100644
--- a/README.md
+++ b/README.md
@@ -3,3 +3,35 @@
 ## License
 
 [BSD 2-Clause License](https://opensource.org/license/bsd-2-clause)
+
+## Local dev with docker compose
+
+### Setup
+
+To set up a local development environment, first clone the repository.
+
+Now, set the required environment variables in a `.env` file (n.b. the leading '.' is important).
+
+```bash
+cp env.template .env # to create a new .env file from the template
+nano .env # to edit the file
+```
+
+Then run the following commands to bring up the services:
+
+```bash
+docker compose up -d # to start the services in the background
+docker compose logs -f # to see the logs
+```
+
+### Accessing Fuseki
+
+The Fuseki server is available at `http://localhost:3030/`.
+
+The default username and password are `admin` and `admin` respectively.
+
+### Accessing FastAPI
+
+An API for depositing RO-Crates is available at `http://localhost:8000/`.
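+
+For example, a crate serialised as JSON-LD can be deposited by POSTing it to the `/upload/crate` endpoint (the filename below is only a placeholder):
+
+```bash
+curl -X POST -F "file=@ro-crate-metadata.json" http://localhost:8000/upload/crate
+```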
+
+Documentation can be seen at `http://localhost:8000/docs`.
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..edc2a88
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,33 @@
+services:
+  fuseki:
+    image: stain/jena-fuseki
+    container_name: ${PROJECT_NAME}-fuseki
+    ports:
+      - "3030:3030"
+    environment:
+      ADMIN_PASSWORD: admin
+      FUSEKI_DATASET_1: wfh
+    volumes:
+      - jena-data:/fuseki
+    networks:
+      - wfh
+
+  fastapi:
+    image: wfh
+    container_name: ${PROJECT_NAME}-fastapi
+    build:
+      context: upload-crate
+      dockerfile: Dockerfile
+    ports:
+      - "8000:80"
+    environment:
+      - FUSEKI_URL=http://${PROJECT_NAME}-fuseki:3030
+    networks:
+      - wfh
+
+volumes:
+  jena-data:
+
+networks:
+  wfh:
+
diff --git a/env.template b/env.template
new file mode 100644
index 0000000..e4ef677
--- /dev/null
+++ b/env.template
@@ -0,0 +1 @@
+PROJECT_NAME=nameofproduct
\ No newline at end of file
diff --git a/upload-crate/Dockerfile b/upload-crate/Dockerfile
new file mode 100644
index 0000000..2bbaf55
--- /dev/null
+++ b/upload-crate/Dockerfile
@@ -0,0 +1,11 @@
+FROM python:3.10
+
+# Install fastapi omnicli wrapper
+
+WORKDIR /code
+COPY ./requirements.txt /code/requirements.txt
+
+RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
+COPY ./app /code/app
+
+CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "80", "--reload"]
diff --git a/upload-crate/app/__init__.py b/upload-crate/app/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/upload-crate/app/main.py b/upload-crate/app/main.py
new file mode 100644
index 0000000..3cd6e89
--- /dev/null
+++ b/upload-crate/app/main.py
@@ -0,0 +1,131 @@
+import pexpect
+import json
+import ontospy
+import requests
+import os
+import uuid
+
+from contextlib import asynccontextmanager
+from llama_index.llms.ollama import Ollama
+from llama_index.core.llms import ChatMessage
+
+from fastapi import FastAPI, HTTPException, File, UploadFile, Request
+
+# -----------------------------------------------------------------------------
+# Constants
+
+FUSEKI_URL = os.getenv("FUSEKI_URL")
+FUSEKI_DATASET = "wfh"  # must match FUSEKI_DATASET_1 in docker-compose.yml
+
+# -----------------------------------------------------------------------------
+# Lifespan with Ollama model
+
+ollama = {}
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    # Create an Ollama instance
+    # NOTE: this assumes an Ollama server is reachable at the address below;
+    # no such service is defined in docker-compose.yml
+    ollama['model'] = Ollama(
+        model="llama3",
+        base_url="http://randall-ollama:11434",
+        request_timeout=30.0,
+        #json_mode=True
+    )
+    yield
+
+    ollama.clear()  # drop the model reference on shutdown
+
+# -----------------------------------------------------------------------------
+# Session
+
+app = FastAPI(lifespan=lifespan)
+
+# -----------------------------------------------------------------------------
+# Routes
+
+# Route which accepts a json payload and uploads it to the triplestore
+@app.post("/upload/message")
+async def upload_message(request: Request):
+    data = await request.json()
+
+    # Extract the triples from the data
+    triples_n3 = []
+    for metric in data['metrics']:
+        # Randomly generate a unique identifier (URN) for the message
+        message_id = f"urn:uuid:{uuid.uuid4()}"
+
+        # Deconstruct topic into series of linked triples
+        topic = metric['tags']['topic']
+        topic_parts = topic.split('/')
+
+        # Head triples (a message was recorded at timestamp)
+        # NOTE: the urn:example predicates are placeholders for a real vocabulary
+        triples_n3.append(f'<{topic}> <urn:example:recordedAt> <{metric["timestamp"]}> .')
+        triples_n3.append(f'<{metric["timestamp"]}> <urn:example:hasMessage> <{message_id}> .')
+
+        # Value triple
+        triples_n3.append(f'<{message_id}> <urn:example:hasValue> "{metric["fields"]["value"]}" .')
+
+    # Upload the triples to the triplestore
+    for triple in triples_n3:
+        try:
+            query = f"INSERT DATA {{ {triple} }}"
+            req = requests.post(f"{FUSEKI_URL}/{FUSEKI_DATASET}/update",
+                                data={"update": query},
+                                auth=('admin', 'admin'),
+                                )
+        except requests.exceptions.RequestException as e:
+            print(f"Triple: {triple} caused an error!")
+            print(e)
+            return {"error": str(e)}
+
+    return
+
+@app.post("/upload/crate")
+def upload_crate(file: UploadFile):
+    fileString = file.file.read()
+    model = ontospy.Ontospy(data=fileString,
+                            rdf_format="json-ld",
+                            verbose=False,
+                            hide_implicit_types=False,
+                            hide_base_schemas=False,
+                            hide_implicit_preds=False,
+                            hide_individuals=False
+                            )
+
+    # Extract the triples from the model
+    triples = model.query("SELECT ?s ?p ?o WHERE { ?s ?p ?o }")
+    print(triples)
+
+    # Upload the triples to the triplestore
+    for triple in triples:
+        # Convert to n3 format
+        triple_n3 = f'<{triple[0]}> <{triple[1]}> "{triple[2]}" .'
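+        # NOTE: every object is serialised as a plain string literal here, even
+        # when it is an IRI, and datatype/language tags are dropped; this is a
+        # simplification rather than a faithful copy of the crate's graph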
+
+        # Add to the query
+        query = f"INSERT DATA {{ {triple_n3} }}"
+
+        try:
+            req = requests.post(f"{FUSEKI_URL}/{FUSEKI_DATASET}/update",
+                                data={"update": query},
+                                auth=('admin', 'admin'),
+                                )
+        except requests.exceptions.RequestException as e:
+            print(f"Triple: {triple} caused an error!")
+            print(e)
+            return {"error": str(e)}
+        print(req.text)
+
+    return {"filename": file.filename}
+
+
+# -----------------------------------------------------------------------------
+# Chat endpoints
+
+@app.post("/chat")
+def chat(request: Request, query: str):
+    # Get the answer to the query
+    response = ollama['model'].complete(query)
+
+    return {"response": response.text}
+
diff --git a/upload-crate/app/test_api.py b/upload-crate/app/test_api.py
new file mode 100644
index 0000000..00cadd2
--- /dev/null
+++ b/upload-crate/app/test_api.py
@@ -0,0 +1,24 @@
+from fastapi.testclient import TestClient
+
+from .main import app
+
+client = TestClient(app)
+
+################################################################################
+# Test correctly formed GET calls for endpoints defined in main.py
+
+def test_help():
+    response = client.get("/omnicli/help/")
+    assert response.status_code == 200
+    assert response.json() == {"output":" help : Print this help message\n quit : Quit the interactive terminal\n log : Change the log level\n list : List the contents of a folder\n stat : Print information about a specified file or folder\n cd : Makes paths relative to the specified folder\n push : Makes paths relative to the specified folder\n You can restore the original folder with pop\n pop : Restores a folder pushed with 'push'\n copy : Copies a file or folder from src to dst (overwrites dst)\n move : Moves a file or folder from src to dst (overwrites dst)\n del : Deletes the specified file or folder\n mkdir : Create a folder\n cat : Print the contents of a file\n cver : Print the client version\n rver : Print the USD Resolver Plugin version\n sver : Print the server version\n load : Load a USD file\n save [url] : Save a previously loaded USD file (optionally to a different URL)\n close : Close a previously loaded USD file\n lock [url] : Lock a USD file (defaults to loaded stage root)\n unlock [url] : Unlock a USD file (defaults to loaded stage root)\n getacls : Print the ACLs for a URL\n setacls : Change the ACLs for a user or group for a URL\n Specify '-' to remove that user|group from the ACLs\n auth [username] [password] : Set username/password for authentication\n Password defaults to username; blank reverts to standard auth\n checkpoint [comment] : Create a checkpoint of a URL\n listCheckpoints : List all checkpoints of a URL\n restoreCheckpoint : Restore a checkpoint\n disconnect : Disconnect from a server\n join : Join a channel. Only one channel can be joined at a time.\n send : Send a message to the joined channel.\n leave : Leave the joined channel\n"}
+
+
+def test_log():
+    response = client.get("/omnicli/log/")
+    assert response.status_code == 422
+    assert response.json() == {"detail":[{"loc":["query","level"],"msg":"field required","type":"value_error.missing"}]}
+
+    response = client.get("/omnicli/log/?level=info")
+    assert response.status_code == 200
+    assert response.json() == {"output":"Log level set to info"}
+
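+
+def test_upload_message(monkeypatch):
+    # A sketch of a test for the /upload/message endpoint defined in main.py:
+    # requests.post is stubbed out so no running Fuseki instance is needed, and
+    # the payload is made-up example data in the shape upload_message expects.
+    import requests
+
+    calls = []
+
+    class FakeResponse:
+        text = "ok"
+
+    def fake_post(url, **kwargs):
+        calls.append((url, kwargs))
+        return FakeResponse()
+
+    monkeypatch.setattr(requests, "post", fake_post)
+
+    payload = {"metrics": [{"tags": {"topic": "site/area/device"},
+                            "timestamp": 1700000000,
+                            "fields": {"value": 42}}]}
+    response = client.post("/upload/message", json=payload)
+    assert response.status_code == 200
+    # Three triples are generated per metric, so three updates should be sent
+    assert len(calls) == 3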
diff --git a/upload-crate/requirements.txt b/upload-crate/requirements.txt
new file mode 100644
index 0000000..5eeaa60
--- /dev/null
+++ b/upload-crate/requirements.txt
@@ -0,0 +1,9 @@
+fastapi
+pydantic
+uvicorn[standard]
+pytest
+requests
+pexpect
+python-multipart
+ontospy
+llama-index-llms-ollama