Commit

Merge branch 'main' into vrushank
vrushankportkey authored Aug 30, 2023
2 parents 8725e6e + 876f02b commit 05d5e8f
Showing 11 changed files with 174 additions and 42 deletions.
39 changes: 39 additions & 0 deletions .github/workflows/python-publish.yml
@@ -0,0 +1,39 @@
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Upload Python Package

on:
  release:
    types: [published]

permissions:
  contents: read

jobs:
  deploy:

    runs-on: ubuntu-latest

    steps:
    - uses: actions/checkout@v3
    - name: Set up Python
      uses: actions/setup-python@v3
      with:
        python-version: '3.9'
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        pip install build
    - name: Build package
      run: make build
    - name: Publish package
      run: make upload
      env:
        TWINE_USERNAME: "__token__"
        TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
10 changes: 10 additions & 0 deletions Makefile
@@ -13,3 +13,13 @@ test: ## Run tests
	pytest tests
watch-docs: ## Build and watch documentation
	sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/
+
+build:
+	rm -rf dist/ build/
+	python -m pip install build
+	python -m build .
+
+upload:
+	python -m pip install twine
+	python -m twine upload dist/rubeus-*
+	rm -rf dist
7 changes: 2 additions & 5 deletions examples/demo.py
@@ -12,10 +12,7 @@
    },
)

-openai_llm = LLMBase(
-    provider="openai",
-    model="gpt-3.5-turbo",
-)
+openai_llm = LLMBase(provider="openai", model="gpt-3.5-turbo")
res = client.chat_completion.with_fallbacks(llms=[openai_llm])

-print(res.json())
+print(res)
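
The demo now builds the OpenAI target in one line. The same pattern extends naturally to more than one target when exercising fallbacks; the sketch below is illustrative only — the `Rubeus(api_key=...)` constructor argument and the second model choice are assumptions, and `with_fallbacks` is expected to try the given LLMs in order.

```python
# Illustrative sketch extending examples/demo.py; constructor arguments and
# model names here are assumptions, not part of this commit.
from rubeus import Rubeus, LLMBase

client = Rubeus(api_key="pk-...")  # see examples/demo.py for the full construction call

primary = LLMBase(provider="openai", model="gpt-4")
backup = LLMBase(provider="openai", model="gpt-3.5-turbo")

res = client.chat_completion.with_fallbacks(llms=[primary, backup])
print(res)
```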
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -0,0 +1,3 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
26 changes: 24 additions & 2 deletions rubeus/__init__.py
@@ -1,5 +1,27 @@
-from rubeus.api_resources import Rubeus, LLMBase
+from rubeus.api_resources import (
+    Rubeus,
+    LLMBase,
+    RubeusModes,
+    RubeusModesLiteral,
+    ProviderTypes,
+    ProviderTypesLiteral,
+    RubeusCacheType,
+    RubeusCacheLiteral,
+    Message,
+    RubeusResponse,
+)
from rubeus.version import VERSION

__version__ = VERSION
-__all__ = ["Rubeus", "LLMBase"]
+__all__ = [
+    "Rubeus",
+    "LLMBase",
+    "RubeusModes",
+    "RubeusResponse",
+    "RubeusModesLiteral",
+    "ProviderTypes",
+    "ProviderTypesLiteral",
+    "RubeusCacheType",
+    "RubeusCacheLiteral",
+    "Message",
+]
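
With the widened export list, downstream code can import the provider and mode types from the package root instead of reaching into rubeus.api_resources. A minimal sketch; ProviderTypes.OPENAI is referenced in apis.py elsewhere in this commit, other enum members are not shown in this diff.

```python
# Names are importable from the package root thanks to the new __all__ above.
from rubeus import LLMBase, ProviderTypes

# LLMBase accepts either the ProviderTypes enum or its string literal form.
llm = LLMBase(provider=ProviderTypes.OPENAI, model="gpt-3.5-turbo")
```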
26 changes: 24 additions & 2 deletions rubeus/api_resources/__init__.py
@@ -1,7 +1,29 @@
""""""
from .client import Rubeus
-from .utils import LLMBase
+from .utils import (
+    RubeusModes,
+    RubeusModesLiteral,
+    LLMBase,
+    ProviderTypes,
+    ProviderTypesLiteral,
+    RubeusCacheType,
+    RubeusCacheLiteral,
+    Message,
+    RubeusResponse,
+)

from rubeus.version import VERSION

__version__ = VERSION
-__all__ = ["Rubeus", "LLMBase"]
+__all__ = [
+    "Rubeus",
+    "LLMBase",
+    "RubeusModes",
+    "RubeusResponse",
+    "RubeusModesLiteral",
+    "ProviderTypes",
+    "ProviderTypesLiteral",
+    "RubeusCacheType",
+    "RubeusCacheLiteral",
+    "Message",
+]
13 changes: 6 additions & 7 deletions rubeus/api_resources/apis.py
@@ -39,7 +39,7 @@ def create(
        max_retries: int = DEFAULT_MAX_RETRIES,
        provider: Union[ProviderTypes, ProviderTypesLiteral] = ProviderTypes.OPENAI,
        model: str = "gpt-3.5-turbo",
-        model_api_key: str = "",
+        api_key: str = "",
        temperature: float = 0.1,
        top_k: Optional[int] = None,
        top_p: Optional[float] = None,
@@ -52,14 +52,13 @@
        metadata: Optional[Dict[str, Any]] = None,
        weight: Optional[float] = 1.0,
    ) -> RubeusResponse:
-        model_api_key = apikey_from_env(provider)
        llm = Body(
            prompt=prompt,
            timeout=timeout,
            max_retries=max_retries,
            provider=provider,
            model=model,
-            model_api_key=model_api_key,
+            api_key=api_key,
            temperature=temperature,
            top_k=top_k,
            top_p=top_p,
@@ -110,11 +109,11 @@ def create(
        self,
        *,
        messages: List[Message],
-        timeout: Union[float, None] = DEFAULT_TIMEOUT,
-        max_retries: int = DEFAULT_MAX_RETRIES,
        provider: ProviderTypes = ProviderTypes.OPENAI,
        model: str = "gpt-3.5-turbo",
-        model_api_key: str = "",
+        api_key: str = "",
+        timeout: Union[float, None] = DEFAULT_TIMEOUT,
+        max_retries: int = DEFAULT_MAX_RETRIES,
        temperature: float = 0.1,
        top_k: Optional[int] = None,
        top_p: Optional[float] = None,
@@ -134,7 +133,7 @@
            max_retries=max_retries,
            provider=provider,
            model=model,
-            model_api_key=model_api_key,
+            api_key=api_key,
            temperature=temperature,
            top_k=top_k,
            top_p=top_p,
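
The visible effect of this file's changes: callers now pass api_key instead of model_api_key, and the value is forwarded into Body as given rather than being overwritten from the environment inside create(). A hedged sketch of a call after the rename — the attribute path `client.chat_completion.create` is an assumption inferred from the messages-based create() signature above (only `chat_completion.with_fallbacks` appears verbatim in examples/demo.py), and the message entries are shown as plain dicts for brevity.

```python
# Sketch only: client.chat_completion.create and the dict message shape are assumptions.
res = client.chat_completion.create(
    messages=[{"role": "user", "content": "Hello"}],
    model="gpt-3.5-turbo",
    api_key="sk-...",  # renamed from model_api_key in this commit
)
```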
10 changes: 5 additions & 5 deletions rubeus/api_resources/base_client.py
@@ -12,7 +12,6 @@
    ProviderOptions,
    RubeusResponse,
)
-from pydantic import BaseModel
from .exceptions import (
    APIStatusError,
    BadRequestError,
@@ -91,7 +90,7 @@ def _config(self, mode: str, body: List[Body]) -> Config:
            item = i.dict()
            options = ProviderOptions(
                provider=item.get("provider"),
-                apiKey=item.get("model_api_key"),
+                apiKey=item.get("api_key"),
                weight=item.get("weight"),
                retry=item.get("retry"),
                override_params=item.get("override_params"),
@@ -100,7 +99,7 @@ def _config(self, mode: str, body: List[Body]) -> Config:
        return config

    @property
-    def _default_headers(self) -> dict[str, str]:
+    def _default_headers(self) -> Mapping[str, str]:
        return {
            "Content-Type": "application/json",
            "x-portkey-api-key": self.api_key,
@@ -141,7 +140,7 @@ def __enter__(self: Any) -> Any:

    def __exit__(
        self,
-        exc_type: Optional[type[BaseException]],
+        exc_type: Optional[BaseException],
        exc: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
@@ -178,7 +177,8 @@ def _request(
        except Exception as err:
            raise APIConnectionError(request=request) from err
        response = cast(
-            RubeusResponse, BaseModel.construct(**res.json(), raw_body=res.json())
+            RubeusResponse,
+            RubeusResponse.construct(**res.json(), raw_body=res.json()),
        )
        return response

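
The last hunk replaces the generic BaseModel.construct call with RubeusResponse.construct, so the value returned from _request() is actually typed as a RubeusResponse rather than a bare pydantic model. construct() builds a model instance without running validation, which is what lets the raw payload ride along as an extra raw_body field. A standalone illustration of that pattern with a stand-in model (the real RubeusResponse fields are not part of this diff):

```python
# Stand-in model to illustrate pydantic's construct(): no validation is run,
# and the SDK uses the same trick above to attach raw_body to RubeusResponse.
from pydantic import BaseModel


class DemoResponse(BaseModel):
    status: str = "unknown"


payload = {"status": "success"}
resp = DemoResponse.construct(**payload, raw_body=payload)
print(resp.status)  # -> success
```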
62 changes: 52 additions & 10 deletions rubeus/api_resources/utils.py
@@ -66,14 +66,14 @@ class RubeusApiPaths(Enum):
class Options(BaseModel):
    method: str
    url: str
-    params: Optional[Mapping[str, str]]
-    headers: Optional[Mapping[str, str]]
-    max_retries: Optional[int]
-    timeout: Optional[Union[float, None]]
+    params: Optional[Mapping[str, str]] = None
+    headers: Optional[Mapping[str, str]] = None
+    max_retries: Optional[int] = None
+    timeout: Optional[float] = None
    # stringified json
-    data: Optional[Mapping[str, Any]]
+    data: Optional[Mapping[str, Any]] = None
    # json structure
-    json_body: Optional[Mapping[str, Any]]
+    json_body: Optional[Mapping[str, Any]] = None


class OverrideParams(BaseModel):
@@ -153,7 +153,7 @@ class LLMBase(BaseModel):
    messages: Optional[List[Message]] = None
    provider: Union[ProviderTypes, ProviderTypesLiteral]
    model: str
-    model_api_key: str
+    api_key: str
    temperature: Optional[float] = None
    max_tokens: Optional[int] = None
    max_retries: Optional[int] = None
@@ -180,9 +180,51 @@
    # logit_bias: Optional[Dict[str, int]]
    # user: Optional[str]

-    def __init__(self, **kwargs):
-        kwargs["model_api_key"] = str(apikey_from_env(kwargs.get("provider", "")))
-        super().__init__(**kwargs)
+    def __init__(
+        self,
+        *,
+        prompt: Optional[str] = None,
+        messages: Optional[List[Message]] = None,
+        provider: Union[ProviderTypes, ProviderTypesLiteral],
+        model: str,
+        api_key: Optional[str] = None,
+        temperature: Optional[float] = None,
+        max_tokens: Optional[int] = None,
+        max_retries: Optional[int] = None,
+        trace_id: Optional[str] = None,
+        cache_status: Optional[RubeusCacheType] = None,
+        cache: Optional[bool] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        weight: Optional[float] = None,
+        top_k: Optional[int] = None,
+        top_p: Optional[float] = None,
+        stop_sequences: Optional[List[str]] = None,
+        stream: Optional[bool] = False,
+        timeout: Union[float, None] = None,
+        retry_settings: Optional[RetrySettings] = None,
+    ):
+        api_key = api_key or apikey_from_env(provider)
+        super().__init__(
+            prompt=prompt,
+            messages=messages,
+            provider=provider,
+            model=model,
+            api_key=api_key,
+            temperature=temperature,
+            max_tokens=max_tokens,
+            max_retries=max_retries,
+            trace_id=trace_id,
+            cache_status=cache_status,
+            cache=cache,
+            metadata=metadata,
+            weight=weight,
+            top_k=top_k,
+            top_p=top_p,
+            stop_sequences=stop_sequences,
+            stream=stream,
+            timeout=timeout,
+            retry_settings=retry_settings,
+        )


class Body(LLMBase):
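
The rewritten __init__ is the behavioural heart of this commit: the old version unconditionally overwrote model_api_key with whatever apikey_from_env() returned, while the new one only falls back to the environment when no api_key is passed. A short sketch of the two paths; the exact environment variable apikey_from_env() reads per provider is not shown in this diff.

```python
from rubeus import LLMBase

# Explicit key: used as-is, the environment is not consulted.
llm_explicit = LLMBase(provider="openai", model="gpt-3.5-turbo", api_key="sk-...")

# No key passed: api_key falls back to apikey_from_env("openai").
llm_from_env = LLMBase(provider="openai", model="gpt-3.5-turbo")
```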
2 changes: 1 addition & 1 deletion rubeus/version.py
@@ -1 +1 @@
VERSION = "0.1.1"
VERSION = "0.1.8"
18 changes: 8 additions & 10 deletions setup.cfg
@@ -6,7 +6,7 @@ long_description = file: README.md
long_description_content_type = text/markdown
author = Portkey.ai
author_email = [email protected]
-url = https://github.com/Portkey-AI
+url = https://github.com/Portkey-AI/rubeus-python-sdk
license_files = LICENSE
classifiers =
    Programming Language :: Python :: 3
@@ -15,25 +15,23 @@ classifiers =

[options]
packages = find:
-python_requires = >=3.9
+python_requires = >=3.8
zip_safe = True
include_package_data = True
install_requires =
    httpx

-[options.extras_require]
-dev =
-    black ~= 21.6b0
-    pytest == 6.*
-    pytest-asyncio
-    pytest-mock

[options.entry_points]
console_scripts =
    rubeus = rubeus._rubeus_scripts:main

[options.package_data]
-openai = py.typed
+rubeus = py.typed

+[options.extras_require]
+dev =
+    mypy == 0.991
+    black == 23.7.0

[options.packages.find]
exclude =
