From 422e81cd940169152affe8f445d8ec3faf3121f1 Mon Sep 17 00:00:00 2001 From: noble-varghese Date: Tue, 29 Aug 2023 16:28:32 +0530 Subject: [PATCH 1/8] feat: adding the pyproject file and updating the setup.cfg --- Makefile | 10 ++++++++++ pyproject.toml | 3 +++ setup.cfg | 11 ++--------- 3 files changed, 15 insertions(+), 9 deletions(-) create mode 100644 pyproject.toml diff --git a/Makefile b/Makefile index 4800823..0c504d5 100644 --- a/Makefile +++ b/Makefile @@ -13,3 +13,13 @@ test: ## Run tests pytest tests watch-docs: ## Build and watch documentation sphinx-autobuild docs/ docs/_build/html --open-browser --watch $(GIT_ROOT)/llama_index/ + +build: + rm -rf dist/ build/ + python -m pip install build + python -m build . + +upload: + python -m pip install twine + python -m twine upload dist/rubeus-* + rm -rf dist \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..7fd26b9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index e77bf76..4997726 100644 --- a/setup.cfg +++ b/setup.cfg @@ -6,7 +6,7 @@ long_description = file: README.md long_description_content_type = text/markdown author = Portkey.ai author_email = support@portkey.ai -url = https://github.com/Portkey-AI +url = https://github.com/Portkey-AI/rubeus-python-sdk license_files = LICENSE classifiers = Programming Language :: Python :: 3 @@ -21,19 +21,12 @@ include_package_data = True install_requires = httpx -[options.extras_require] -dev = - black ~= 21.6b0 - pytest == 6.* - pytest-asyncio - pytest-mock - [options.entry_points] console_scripts = rubeus = rubeus._rubeus_scripts:main [options.package_data] - openai = py.typed + rubeus = py.typed [options.packages.find] exclude = From ada02479855ccb91aeb1c6aaa7da6e50b67b4221 Mon Sep 17 00:00:00 2001 From: Noble Varghese Date: Tue, 29 Aug 2023 16:29:21 +0530 Subject: [PATCH 2/8] Create python-publish.yml --- .github/workflows/python-publish.yml | 39 ++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 .github/workflows/python-publish.yml diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml new file mode 100644 index 0000000..7404108 --- /dev/null +++ b/.github/workflows/python-publish.yml @@ -0,0 +1,39 @@ +# This workflow will upload a Python Package using Twine when a release is created +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. 
+ +name: Upload Python Package + +on: + release: + types: [published] + +permissions: + contents: read + +jobs: + deploy: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v3 + with: + python-version: '3.9' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + - name: Build package + run: make build + - name: Publish package + uses: make upload + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} From 056e5abb04b897af6c2f791ff5c50b86ddaa4113 Mon Sep 17 00:00:00 2001 From: noble-varghese Date: Tue, 29 Aug 2023 16:42:12 +0530 Subject: [PATCH 3/8] feat: adding make command for uploads --- .github/workflows/python-publish.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 7404108..7ccf45e 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -33,7 +33,7 @@ jobs: - name: Build package run: make build - name: Publish package - uses: make upload - with: - user: __token__ - password: ${{ secrets.PYPI_API_TOKEN }} + run: make upload + env: + TWINE_USERNAME: "__token__" + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} From 4043cc19050f96c931ae171fbc0fe2c3f1262dea Mon Sep 17 00:00:00 2001 From: noble-varghese Date: Tue, 29 Aug 2023 19:26:06 +0530 Subject: [PATCH 4/8] fix: type fixes with latest pydantic version --- rubeus/api_resources/apis.py | 4 ++-- rubeus/api_resources/base_client.py | 3 ++- rubeus/api_resources/utils.py | 12 ++++++------ setup.cfg | 5 +++++ 4 files changed, 15 insertions(+), 9 deletions(-) diff --git a/rubeus/api_resources/apis.py b/rubeus/api_resources/apis.py index ccf33e9..6537c86 100644 --- a/rubeus/api_resources/apis.py +++ b/rubeus/api_resources/apis.py @@ -110,11 +110,11 @@ def create( self, *, messages: List[Message], - timeout: Union[float, None] = DEFAULT_TIMEOUT, - max_retries: int = DEFAULT_MAX_RETRIES, provider: ProviderTypes = ProviderTypes.OPENAI, model: str = "gpt-3.5-turbo", model_api_key: str = "", + timeout: Union[float, None] = DEFAULT_TIMEOUT, + max_retries: int = DEFAULT_MAX_RETRIES, temperature: float = 0.1, top_k: Optional[int] = None, top_p: Optional[float] = None, diff --git a/rubeus/api_resources/base_client.py b/rubeus/api_resources/base_client.py index 0bd2c96..a66b037 100644 --- a/rubeus/api_resources/base_client.py +++ b/rubeus/api_resources/base_client.py @@ -178,7 +178,8 @@ def _request( except Exception as err: raise APIConnectionError(request=request) from err response = cast( - RubeusResponse, BaseModel.construct(**res.json(), raw_body=res.json()) + RubeusResponse, + RubeusResponse.model_construct(**res.json(), raw_body=res.json()), ) return response diff --git a/rubeus/api_resources/utils.py b/rubeus/api_resources/utils.py index b602ee1..d685c5e 100644 --- a/rubeus/api_resources/utils.py +++ b/rubeus/api_resources/utils.py @@ -66,14 +66,14 @@ class RubeusApiPaths(Enum): class Options(BaseModel): method: str url: str - params: Optional[Mapping[str, str]] - headers: Optional[Mapping[str, str]] - max_retries: Optional[int] - timeout: Optional[Union[float, None]] + params: Optional[Mapping[str, str]] = None + headers: Optional[Mapping[str, str]] = None + max_retries: Optional[int] = None + timeout: Optional[float] = None # stringified json - data: Optional[Mapping[str, Any]] + data: Optional[Mapping[str, Any]] = None # json structure - json_body: Optional[Mapping[str, 
Any]] + json_body: Optional[Mapping[str, Any]] = None class OverrideParams(BaseModel): diff --git a/setup.cfg b/setup.cfg index 4997726..3ea985b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -28,6 +28,11 @@ console_scripts = [options.package_data] rubeus = py.typed +[options.extras_require] +dev = + mypy + black ~= 21.6b0 + [options.packages.find] exclude = tests From 54663f7485cf5bea8d67e76ba0e20a042480e096 Mon Sep 17 00:00:00 2001 From: noble-varghese Date: Tue, 29 Aug 2023 22:37:15 +0530 Subject: [PATCH 5/8] fix: Updating the pydantic validation and removing the warning error messages --- examples/demo.py | 4 +-- rubeus/api_resources/apis.py | 9 +++-- rubeus/api_resources/base_client.py | 8 ++--- rubeus/api_resources/utils.py | 51 ++++++++++++++++++++++++++--- 4 files changed, 56 insertions(+), 16 deletions(-) diff --git a/examples/demo.py b/examples/demo.py index c9d58a4..49dce55 100644 --- a/examples/demo.py +++ b/examples/demo.py @@ -14,8 +14,8 @@ openai_llm = LLMBase( provider="openai", - model="gpt-3.5-turbo", + model="gpt-3.5-turbo" ) res = client.chat_completion.with_fallbacks(llms=[openai_llm]) -print(res.json()) +print(res) diff --git a/rubeus/api_resources/apis.py b/rubeus/api_resources/apis.py index 6537c86..e85a27a 100644 --- a/rubeus/api_resources/apis.py +++ b/rubeus/api_resources/apis.py @@ -39,7 +39,7 @@ def create( max_retries: int = DEFAULT_MAX_RETRIES, provider: Union[ProviderTypes, ProviderTypesLiteral] = ProviderTypes.OPENAI, model: str = "gpt-3.5-turbo", - model_api_key: str = "", + api_key: str = "", temperature: float = 0.1, top_k: Optional[int] = None, top_p: Optional[float] = None, @@ -52,14 +52,13 @@ def create( metadata: Optional[Dict[str, Any]] = None, weight: Optional[float] = 1.0, ) -> RubeusResponse: - model_api_key = apikey_from_env(provider) llm = Body( prompt=prompt, timeout=timeout, max_retries=max_retries, provider=provider, model=model, - model_api_key=model_api_key, + api_key=api_key, temperature=temperature, top_k=top_k, top_p=top_p, @@ -112,7 +111,7 @@ def create( messages: List[Message], provider: ProviderTypes = ProviderTypes.OPENAI, model: str = "gpt-3.5-turbo", - model_api_key: str = "", + api_key: str = "", timeout: Union[float, None] = DEFAULT_TIMEOUT, max_retries: int = DEFAULT_MAX_RETRIES, temperature: float = 0.1, @@ -134,7 +133,7 @@ def create( max_retries=max_retries, provider=provider, model=model, - model_api_key=model_api_key, + api_key=api_key, temperature=temperature, top_k=top_k, top_p=top_p, diff --git a/rubeus/api_resources/base_client.py b/rubeus/api_resources/base_client.py index a66b037..a889c42 100644 --- a/rubeus/api_resources/base_client.py +++ b/rubeus/api_resources/base_client.py @@ -74,11 +74,11 @@ def post( def _construct( self, *, method: str, url: str, body: List[Body], mode: str ) -> Options: - opts = Options.construct() + opts = Options.model_construct() opts.method = method opts.url = url json_body = { - "config": self._config(mode, body).dict(), + "config": self._config(mode, body).model_dump(), "params": self._custom_params, } opts.json_body = remove_empty_values(json_body) @@ -88,10 +88,10 @@ def _construct( def _config(self, mode: str, body: List[Body]) -> Config: config = Config(mode=mode, options=[]) for i in body: - item = i.dict() + item = i.model_dump() options = ProviderOptions( provider=item.get("provider"), - apiKey=item.get("model_api_key"), + apiKey=item.get("api_key"), weight=item.get("weight"), retry=item.get("retry"), override_params=item.get("override_params"), diff --git 
a/rubeus/api_resources/utils.py b/rubeus/api_resources/utils.py index d685c5e..cdf5dc0 100644 --- a/rubeus/api_resources/utils.py +++ b/rubeus/api_resources/utils.py @@ -148,12 +148,11 @@ class LLMBase(BaseModel): (default: {}). weight (Optional[float]): The weight of the LLM in the ensemble (default: 1.0). """ - prompt: Optional[str] = None messages: Optional[List[Message]] = None provider: Union[ProviderTypes, ProviderTypesLiteral] model: str - model_api_key: str + api_key: str temperature: Optional[float] = None max_tokens: Optional[int] = None max_retries: Optional[int] = None @@ -180,9 +179,51 @@ class LLMBase(BaseModel): # logit_bias: Optional[Dict[str, int]] # user: Optional[str] - def __init__(self, **kwargs): - kwargs["model_api_key"] = str(apikey_from_env(kwargs.get("provider", ""))) - super().__init__(**kwargs) + def __init__( + self, + *, + prompt: Optional[str] = None, + messages: Optional[List[Message]] = None, + provider: Union[ProviderTypes, ProviderTypesLiteral], + model: str, + api_key: Optional[str] = None, + temperature: Optional[float] = None, + max_tokens: Optional[int] = None, + max_retries: Optional[int] = None, + trace_id: Optional[str] = None, + cache_status: Optional[RubeusCacheType] = None, + cache: Optional[bool] = None, + metadata: Optional[Dict[str, Any]] = None, + weight: Optional[float] = None, + top_k: Optional[int] = None, + top_p: Optional[float] = None, + stop_sequences: Optional[List[str]] = None, + stream: Optional[bool] = False, + timeout: Union[float, None] = None, + retry_settings: Optional[RetrySettings] = None, + ): + api_key = api_key or apikey_from_env(provider) + super().__init__( + prompt=prompt, + messages=messages, + provider=provider, + model=model, + api_key=api_key, + temperature=temperature, + max_tokens=max_tokens, + max_retries=max_retries, + trace_id=trace_id, + cache_status=cache_status, + cache=cache, + metadata=metadata, + weight=weight, + top_k=top_k, + top_p=top_p, + stop_sequences=stop_sequences, + stream=stream, + timeout=timeout, + retry_settings=retry_settings, + ) class Body(LLMBase): From ef313ccae072f355884a3d7dcc897c5e27d3c04f Mon Sep 17 00:00:00 2001 From: noble-varghese Date: Tue, 29 Aug 2023 23:38:06 +0530 Subject: [PATCH 6/8] fix: exporting more methods in rubeus + minor version upgrade --- examples/demo.py | 5 +---- rubeus/__init__.py | 26 ++++++++++++++++++++++++-- rubeus/api_resources/__init__.py | 26 ++++++++++++++++++++++++-- rubeus/api_resources/utils.py | 1 + rubeus/version.py | 2 +- 5 files changed, 51 insertions(+), 9 deletions(-) diff --git a/examples/demo.py b/examples/demo.py index 49dce55..92363a3 100644 --- a/examples/demo.py +++ b/examples/demo.py @@ -12,10 +12,7 @@ }, ) -openai_llm = LLMBase( - provider="openai", - model="gpt-3.5-turbo" -) +openai_llm = LLMBase(provider="openai", model="gpt-3.5-turbo") res = client.chat_completion.with_fallbacks(llms=[openai_llm]) print(res) diff --git a/rubeus/__init__.py b/rubeus/__init__.py index 0b26c10..9d48cef 100644 --- a/rubeus/__init__.py +++ b/rubeus/__init__.py @@ -1,5 +1,27 @@ -from rubeus.api_resources import Rubeus, LLMBase +from rubeus.api_resources import ( + Rubeus, + LLMBase, + RubeusModes, + RubeusModesLiteral, + ProviderTypes, + ProviderTypesLiteral, + RubeusCacheType, + RubeusCacheLiteral, + Message, + RubeusResponse, +) from rubeus.version import VERSION __version__ = VERSION -__all__ = ["Rubeus", "LLMBase"] +__all__ = [ + "Rubeus", + "LLMBase", + "RubeusModes", + "RubeusResponse", + "RubeusModesLiteral", + "ProviderTypes", + 
"ProviderTypesLiteral", + "RubeusCacheType", + "RubeusCacheLiteral", + "Message", +] diff --git a/rubeus/api_resources/__init__.py b/rubeus/api_resources/__init__.py index e64939d..83b0cd7 100644 --- a/rubeus/api_resources/__init__.py +++ b/rubeus/api_resources/__init__.py @@ -1,7 +1,29 @@ """""" from .client import Rubeus -from .utils import LLMBase +from .utils import ( + RubeusModes, + RubeusModesLiteral, + LLMBase, + ProviderTypes, + ProviderTypesLiteral, + RubeusCacheType, + RubeusCacheLiteral, + Message, + RubeusResponse, +) + from rubeus.version import VERSION __version__ = VERSION -__all__ = ["Rubeus", "LLMBase"] +__all__ = [ + "Rubeus", + "LLMBase", + "RubeusModes", + "RubeusResponse", + "RubeusModesLiteral", + "ProviderTypes", + "ProviderTypesLiteral", + "RubeusCacheType", + "RubeusCacheLiteral", + "Message", +] diff --git a/rubeus/api_resources/utils.py b/rubeus/api_resources/utils.py index cdf5dc0..782c8ff 100644 --- a/rubeus/api_resources/utils.py +++ b/rubeus/api_resources/utils.py @@ -148,6 +148,7 @@ class LLMBase(BaseModel): (default: {}). weight (Optional[float]): The weight of the LLM in the ensemble (default: 1.0). """ + prompt: Optional[str] = None messages: Optional[List[Message]] = None provider: Union[ProviderTypes, ProviderTypesLiteral] diff --git a/rubeus/version.py b/rubeus/version.py index 7d7c7c3..6d1f1c8 100644 --- a/rubeus/version.py +++ b/rubeus/version.py @@ -1 +1 @@ -VERSION = "0.1.1" +VERSION = "0.1.3" From 6e1259bae2f4d6d53c29635eb4fc931bd8331701 Mon Sep 17 00:00:00 2001 From: noble-varghese Date: Wed, 30 Aug 2023 01:58:41 +0530 Subject: [PATCH 7/8] fix: minor fixes in types --- rubeus/api_resources/base_client.py | 5 ++--- rubeus/version.py | 2 +- setup.cfg | 2 +- 3 files changed, 4 insertions(+), 5 deletions(-) diff --git a/rubeus/api_resources/base_client.py b/rubeus/api_resources/base_client.py index a889c42..79ccf89 100644 --- a/rubeus/api_resources/base_client.py +++ b/rubeus/api_resources/base_client.py @@ -12,7 +12,6 @@ ProviderOptions, RubeusResponse, ) -from pydantic import BaseModel from .exceptions import ( APIStatusError, BadRequestError, @@ -100,7 +99,7 @@ def _config(self, mode: str, body: List[Body]) -> Config: return config @property - def _default_headers(self) -> dict[str, str]: + def _default_headers(self) -> Mapping[str, str]: return { "Content-Type": "application/json", "x-portkey-api-key": self.api_key, @@ -141,7 +140,7 @@ def __enter__(self: Any) -> Any: def __exit__( self, - exc_type: Optional[type[BaseException]], + exc_type: Optional[BaseException], exc: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: diff --git a/rubeus/version.py b/rubeus/version.py index 6d1f1c8..8d91de2 100644 --- a/rubeus/version.py +++ b/rubeus/version.py @@ -1 +1 @@ -VERSION = "0.1.3" +VERSION = "0.1.7" diff --git a/setup.cfg b/setup.cfg index 3ea985b..9e03ee2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -15,7 +15,7 @@ classifiers = [options] packages = find: -python_requires = >=3.9 +python_requires = >=3.8 zip_safe = True include_package_data = True install_requires = From 876f02b02d878163f591b1b0be19adfaad7cd9e2 Mon Sep 17 00:00:00 2001 From: noble-varghese Date: Wed, 30 Aug 2023 02:02:26 +0530 Subject: [PATCH 8/8] fix: minor version update --- rubeus/api_resources/base_client.py | 8 ++++---- rubeus/version.py | 2 +- setup.cfg | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/rubeus/api_resources/base_client.py b/rubeus/api_resources/base_client.py index 79ccf89..f20adbb 100644 --- 
a/rubeus/api_resources/base_client.py +++ b/rubeus/api_resources/base_client.py @@ -73,11 +73,11 @@ def post( def _construct( self, *, method: str, url: str, body: List[Body], mode: str ) -> Options: - opts = Options.model_construct() + opts = Options.construct() opts.method = method opts.url = url json_body = { - "config": self._config(mode, body).model_dump(), + "config": self._config(mode, body).dict(), "params": self._custom_params, } opts.json_body = remove_empty_values(json_body) @@ -87,7 +87,7 @@ def _construct( def _config(self, mode: str, body: List[Body]) -> Config: config = Config(mode=mode, options=[]) for i in body: - item = i.model_dump() + item = i.dict() options = ProviderOptions( provider=item.get("provider"), apiKey=item.get("api_key"), @@ -178,7 +178,7 @@ def _request( raise APIConnectionError(request=request) from err response = cast( RubeusResponse, - RubeusResponse.model_construct(**res.json(), raw_body=res.json()), + RubeusResponse.construct(**res.json(), raw_body=res.json()), ) return response diff --git a/rubeus/version.py b/rubeus/version.py index 8d91de2..9bef2a6 100644 --- a/rubeus/version.py +++ b/rubeus/version.py @@ -1 +1 @@ -VERSION = "0.1.7" +VERSION = "0.1.8" diff --git a/setup.cfg b/setup.cfg index 9e03ee2..28fe439 100644 --- a/setup.cfg +++ b/setup.cfg @@ -30,8 +30,8 @@ console_scripts = [options.extras_require] dev = - mypy - black ~= 21.6b0 + mypy == 0.991 + black == 23.7.0 [options.packages.find] exclude =
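
The series toggles between the pydantic v1 and v2 serialization APIs: patches 4 and 5 move base_client.py to model_construct()/model_dump(), and patch 8 reverts to construct()/dict() alongside lowering python_requires to 3.8. The sketch below is not part of these patches; it is one hedged way a codebase like this could support both pydantic major versions at once. The flag and helper names (PYDANTIC_V2, model_dump, model_construct) are illustrative and do not come from the rubeus source.

    # Sketch only, not from this patch series: a minimal pydantic v1/v2
    # compatibility shim. Call sites use these helpers instead of choosing
    # between .dict()/.construct() and .model_dump()/.model_construct().
    from typing import Any, Mapping, Type, TypeVar

    import pydantic
    from pydantic import BaseModel

    # pydantic exposes a VERSION string in both major versions.
    PYDANTIC_V2 = pydantic.VERSION.startswith("2.")

    M = TypeVar("M", bound=BaseModel)


    def model_dump(model: BaseModel) -> Mapping[str, Any]:
        """Serialize a model to a dict on either pydantic major version."""
        return model.model_dump() if PYDANTIC_V2 else model.dict()


    def model_construct(cls: Type[M], **fields: Any) -> M:
        """Build a model without validation on either pydantic major version."""
        return cls.model_construct(**fields) if PYDANTIC_V2 else cls.construct(**fields)

With a shim like this, base_client.py could keep a single code path (for example, model_dump(self._config(mode, body)) when building the request config) while the package supports both the Python 3.8 / pydantic v1 floor introduced in patch 7 and newer environments that ship pydantic v2.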