Commit 7f1402a: run

leej3 committed Aug 29, 2024
1 parent eddd426 commit 7f1402a
Showing 6 changed files with 60 additions and 53 deletions.
2 changes: 1 addition & 1 deletion Dockerfile.base
@@ -8,7 +8,7 @@ RUN mkdir -p /opt/osm
 RUN pip install uv fastapi[standard] uvicorn
 
 # LLM stuff
-RUN pip install llama-index llama-index-llms-openai llama-index-program-openai
+RUN pip install llama-index llama-index-llms-openai llama-index-program-openai llama-index-llms-openrouter
 
 # Dashboard stuff
 RUN pip install colorcet panel

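The only change to Dockerfile.base is the extra llama-index-llms-openrouter package. A minimal smoke test for the rebuilt image could look like the sketch below; it is hypothetical, not part of this commit, and the "NOKEY" placeholder simply mirrors the default used in the compose override further down. The model name is the one referenced in app.py in this commit.

# smoke_test_openrouter.py -- hypothetical check, not in the repository
from llama_index.llms.openrouter import OpenRouter

# Constructing the client only exercises the import and argument handling;
# no request is sent to OpenRouter until the model is actually queried.
llm = OpenRouter(api_key="NOKEY", model="openai/chatgpt-4o-latest")
print(type(llm).__name__)  # expected: OpenRouter
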
79 changes: 40 additions & 39 deletions compose.development.override.yaml
@@ -1,56 +1,57 @@
 name: local-osm
 services:
-  rtransparent:
-    container_name: rtransparent
-    build:
-      context: .
-      dockerfile: ./external_components/rtransparent/Dockerfile
-    volumes:
-      - ./external_components/rtransparent:/app
+  # rtransparent:
+  #   container_name: rtransparent
+  #   build:
+  #     context: .
+  #     dockerfile: ./external_components/rtransparent/Dockerfile
+  #   volumes:
+  #     - ./external_components/rtransparent:/app
 
   llm_extraction:
     container_name: llm_extraction
     environment:
       - OPENAI_API_KEY=${OPENAI_API_KEY:-NOKEY}
+      - OPENROUTER_API_KEY=${OPENROUTER_API_KEY:-NOKEY}
     build:
       context: .
       dockerfile: ./external_components/llm_extraction/Dockerfile
     volumes:
       - ./external_components/llm_extraction:/app
 
-  web_api:
-    container_name: web_api
-    environment:
-      - MONGODB_URI=mongodb://db:27017/osm
-    build:
-      context: .
-      dockerfile: ./web/api/Dockerfile
-    ports:
-      - 80:80
-    volumes:
-      - ./web/api:/app/app
-      - ./osm:/opt/osm/osm
-    working_dir: /app/app
-    command: ["fastapi","dev","--host","0.0.0.0","--port","80"]
-    depends_on:
-      - db
+  # web_api:
+  #   container_name: web_api
+  #   environment:
+  #     - MONGODB_URI=mongodb://db:27017/osm
+  #   build:
+  #     context: .
+  #     dockerfile: ./web/api/Dockerfile
+  #   ports:
+  #     - 80:80
+  #   volumes:
+  #     - ./web/api:/app/app
+  #     - ./osm:/opt/osm/osm
+  #   working_dir: /app/app
+  #   command: ["fastapi","dev","--host","0.0.0.0","--port","80"]
+  #   depends_on:
+  #     - db
 
-  dashboard:
-    container_name: dashboard
-    environment:
-      - MONGODB_URI=mongodb://db:27017/osm
-    build:
-      context: .
-      dockerfile: ./web/dashboard/Dockerfile
-    ports:
-      - "8501:8501"
-    volumes:
-      - ./web/dashboard:/app
-      - ./osm:/opt/osm/osm
-    working_dir: /app
-    command: ["python", "app.py"]
-    depends_on:
-      - db
+  # dashboard:
+  #   container_name: dashboard
+  #   environment:
+  #     - MONGODB_URI=mongodb://db:27017/osm
+  #   build:
+  #     context: .
+  #     dockerfile: ./web/dashboard/Dockerfile
+  #   ports:
+  #     - "8501:8501"
+  #   volumes:
+  #     - ./web/dashboard:/app
+  #     - ./osm:/opt/osm/osm
+  #   working_dir: /app
+  #   command: ["python", "app.py"]
+  #   depends_on:
+  #     - db
 
   db:
     container_name: db

10 changes: 5 additions & 5 deletions compose.yaml
@@ -4,11 +4,11 @@ services:
     image: elifesciences/sciencebeam-parser
     ports:
       - "8070:8070"
-  rtransparent:
-    container_name: rtransparent
-    image: nimhdsst/rtransparent:staging
-    ports:
-      - "8071:8071"
+  # rtransparent:
+  #   container_name: rtransparent
+  #   image: nimhdsst/rtransparent:staging
+  #   ports:
+  #     - "8071:8071"
   llm_extraction:
     container_name: llm_extraction
     image: nimhdsst/llm_extraction:staging

11 changes: 9 additions & 2 deletions external_components/llm_extraction/app.py
@@ -1,4 +1,5 @@
 import logging
+import os
 
 from fastapi import FastAPI, File, HTTPException, Query, UploadFile
 from llama_index.core import ChatPromptTemplate
@@ -9,8 +10,14 @@
 
 # from pydantic import BaseModel, Field
 from osm.schemas.metrics_schemas import LLMExtractorMetrics
-
-LLM_MODELS = {"gpt-4o-2024-08-06": OpenAI(model="gpt-4o-2024-08-06")}
+from llama_index.llms.openrouter import OpenRouter
+LLM_MODELS = {
+    # "gpt-4o-2024-08-06": OpenRouter(
+    #     api_key=os.environ["OPENROUTER_API_KEY"],
+    #     model="openai/chatgpt-4o-latest",
+    # )
+    "gpt-4o-2024-08-06": OpenAI(model="gpt-4o-2024-08-06")
+}
 
 
 logger = logging.getLogger(__name__)

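Here the OpenRouter backend is imported and added to LLM_MODELS, but the entry is left commented out, so the OpenAI client remains the only active model. A hedged sketch of how the selection could instead be made conditional on the environment is shown below; this is an assumption about a possible next step, not what the commit does, and it reuses the NOKEY default from the compose files.

# Hypothetical variant of the LLM_MODELS setup in app.py: fall back to the
# OpenAI client unless a real OpenRouter key is provided in the environment.
import os

from llama_index.llms.openai import OpenAI
from llama_index.llms.openrouter import OpenRouter

if os.environ.get("OPENROUTER_API_KEY", "NOKEY") != "NOKEY":
    # Route the same logical model name through OpenRouter.
    LLM_MODELS = {
        "gpt-4o-2024-08-06": OpenRouter(
            api_key=os.environ["OPENROUTER_API_KEY"],
            model="openai/chatgpt-4o-latest",
        )
    }
else:
    # Default: call OpenAI directly, as the committed code does.
    LLM_MODELS = {"gpt-4o-2024-08-06": OpenAI(model="gpt-4o-2024-08-06")}
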
10 changes: 5 additions & 5 deletions osm/pipeline/core.py
@@ -110,11 +110,11 @@ def run(self, user_managed_compose: bool = False, llm_model: str = None):
             extracted_metrics = extractor.run(
                 parsed_data, parser=parser.name, llm_model=llm_model
             )
-            self.savers.save_osm(
-                data=self.file_data,
-                metrics=extracted_metrics,
-                components=[parser, extractor, *self.savers],
-            )
+            # self.savers.save_osm(
+            #     data=self.file_data,
+            #     metrics=extracted_metrics,
+            #     components=[parser, extractor, *self.savers],
+            # )
             self.savers.save_json(extracted_metrics, self.metrics_path)
 
     @staticmethod

1 change: 0 additions & 1 deletion osm/pipeline/savers.py
@@ -85,7 +85,6 @@ def _run(self, data: bytes, metrics: dict, components: list[schemas.Component]):
         print(f"Using OSM API: {osm_api}")
         # Build the payload
         schemas = get_metrics_schemas()
-        breakpoint()
         try:
             payload = {
                 "osm_version": __version__,

