diff --git a/README.md b/README.md
index dfbf80a..9bb5ca1 100644
--- a/README.md
+++ b/README.md
@@ -70,7 +70,33 @@ pip install portkey-ai
 
-# Changelog
+### Changelog
+
+All notable changes to this project will be documented in this file. Dates are displayed in UTC.
+
+Generated by [`auto-changelog`](https://github.com/CookPete/auto-changelog).
+
+#### [v0.1.45](https://github.com/Portkey-AI/portkey-python-sdk/compare/v0.1.44...v0.1.45)
+
+> 12 September 2023
+
+- feat: Add support for azure models [`#9`](https://github.com/Portkey-AI/portkey-python-sdk/pull/9)
+- docs: Added examples for azure-openai fallback and loadbalance [`#8`](https://github.com/Portkey-AI/portkey-python-sdk/pull/8)
+- chore: Updating the documentation [`#7`](https://github.com/Portkey-AI/portkey-python-sdk/pull/7)
+- chore: Updating the documentation [`#6`](https://github.com/Portkey-AI/portkey-python-sdk/pull/6)
+- chore: Adding an example for ab-testing and loadbalance [`#5`](https://github.com/Portkey-AI/portkey-python-sdk/pull/5)
+
+#### v0.1.44
+
+> 11 September 2023
+
+- feat: added changie to generate changelogs [`#4`](https://github.com/Portkey-AI/portkey-python-sdk/pull/4)
+- feat: version upgrade - 0.1.44 [`#3`](https://github.com/Portkey-AI/portkey-python-sdk/pull/3)
+- feat: Workflow update [`cb80617`](https://github.com/Portkey-AI/portkey-python-sdk/commit/cb806173049d2a1f690935320e5ad4738910a452)
+- fea: Initial Commit [`2c3631a`](https://github.com/Portkey-AI/portkey-python-sdk/commit/2c3631ac65ff58158695e84881993460fd27cb82)
+- feat: adding the streaming capability into rubeus sdk [`f06e23b`](https://github.com/Portkey-AI/portkey-python-sdk/commit/f06e23bfa676995d578f64eff3401db917660742)
+
+
 We are excited to announce the **stable release** of the all-new **Portkey Python SDK**, version 0.1.44! This SDK makes it easier than ever to add production capabilities to your existing LLM systems with one line of change to your code.
@@ -82,13 +108,6 @@ We are excited to announce the **stable release** of the all-new **Portkey Pytho
 - **Community Support**: [Join our growing community](https://discord.gg/QHJ3RgcvKT) of practitioners putting LLMs in production. Share ideas, resolve doubts, and collaborate on projects.
 
-### Feedback and Contributions
-
-We welcome your feedback and contributions! Feel free to report issues, suggest enhancements, or submit pull requests on our [GitHub repository](https://github.com/Portkey-AI/portkey-python-sdk).
-
-Thank you for your support and enthusiasm for the Portkey Python SDK. We look forward to seeing the amazing projects you will build with it!
-
 Happy coding!
 
 - The Portkey Team
-
diff --git a/portkey/__init__.py b/portkey/__init__.py
index 8a9b527..84f6e56 100644
--- a/portkey/__init__.py
+++ b/portkey/__init__.py
@@ -14,6 +14,11 @@
     Completions,
     Params,
     Config,
+    RetrySettings,
+    ChatCompletion,
+    ChatCompletionChunk,
+    TextCompletion,
+    TextCompletionChunk,
 )
 from portkey.version import VERSION
@@ -35,6 +40,11 @@
     "ChatCompletions",
     "Completions",
     "Params",
+    "RetrySettings",
+    "ChatCompletion",
+    "ChatCompletionChunk",
+    "TextCompletion",
+    "TextCompletionChunk",
     "Config",
     "api_key",
     "base_url",
diff --git a/portkey/api_resources/__init__.py b/portkey/api_resources/__init__.py
index b7793d8..663e529 100644
--- a/portkey/api_resources/__init__.py
+++ b/portkey/api_resources/__init__.py
@@ -12,6 +12,11 @@
     PortkeyResponse,
     Params,
     Config,
+    RetrySettings,
+    ChatCompletion,
+    ChatCompletionChunk,
+    TextCompletion,
+    TextCompletionChunk,
 )
 from portkey.version import VERSION
@@ -31,4 +36,9 @@
     "Completions",
     "Params",
     "Config",
+    "RetrySettings",
+    "ChatCompletion",
+    "ChatCompletionChunk",
+    "TextCompletion",
+    "TextCompletionChunk",
 ]
diff --git a/portkey/api_resources/utils.py b/portkey/api_resources/utils.py
index 85503eb..6da99f1 100644
--- a/portkey/api_resources/utils.py
+++ b/portkey/api_resources/utils.py
@@ -89,7 +89,7 @@ class ApiType(str, Enum, metaclass=MetaEnum):
     CHAT_COMPLETION = "chat_completions"
 
 
-ModesLiteral = Literal["fallback", "loadbalance", "single", "proxy"]
+ModesLiteral = Literal["fallback", "ab_test", "single", "proxy"]
 
 
 class PortkeyApiPaths(Enum):
@@ -200,13 +200,6 @@ class Constructs(BaseModel):
 
 
 class LLMOptions(Constructs, ConversationInput, ModelParams):
-    @validator("cache_age", always=True)
-    @classmethod
-    def parse_cache_age(cls, cache_age):
-        if cache_age is not None:
-            cache_age = f"max-age={cache_age}"
-        return cache_age
-
     @validator("api_key", "virtual_key", always=False)
     @classmethod
     def parse_api_key(cls, api_key, values):
@@ -219,6 +212,13 @@ def parse_api_key(cls, api_key, values):
 
 class ProviderOptions(Constructs):
     override_params: Optional[OverrideParams] = None
+
+    @validator("cache_age", always=True)
+    @classmethod
+    def parse_cache_age(cls, cache_age):
+        if cache_age is not None:
+            cache_age = f"max-age={cache_age}"
+        return cache_age
 
 
 class RequestConfig(BaseModel):
diff --git a/portkey/version.py b/portkey/version.py
index 8aa56b2..06e020f 100644
--- a/portkey/version.py
+++ b/portkey/version.py
@@ -1 +1 @@
-VERSION = "0.1.47"
+VERSION = "0.1.48"
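
The `portkey/__init__.py` hunks above re-export `RetrySettings` and the response models (`ChatCompletion`, `ChatCompletionChunk`, `TextCompletion`, `TextCompletionChunk`) from the top-level package. The sketch below illustrates what that enables; it is a minimal example only, assuming the SDK's 0.1.x style of module-level `api_key`/`config` attributes and a `ChatCompletions.create` call whose exact parameters and streaming return type are not confirmed by this diff.

```python
# Sketch only: imports use names exported in this diff; the create() signature,
# the module-level config attribute, and the streaming behaviour are assumptions.
from typing import Iterator

import portkey
from portkey import ChatCompletion, ChatCompletionChunk, Config, LLMOptions

portkey.api_key = "<PORTKEY_API_KEY>"  # module-level attribute listed in __all__
portkey.config = Config(
    mode="single",  # one of the ModesLiteral values defined in utils.py
    llms=[LLMOptions(provider="openai", virtual_key="<OPENAI_VIRTUAL_KEY>")],
)

# Non-streaming call, assumed to return a ChatCompletion object.
response: ChatCompletion = portkey.ChatCompletions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello from the Portkey SDK"}],
)

# Streaming call, assumed to yield ChatCompletionChunk objects.
chunks: Iterator[ChatCompletionChunk] = portkey.ChatCompletions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello again"}],
    stream=True,
)
```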
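The `utils.py` hunk renames the `"loadbalance"` entry of `ModesLiteral` to `"ab_test"`, which lines up with the ab-testing example referenced in the v0.1.45 changelog. A hedged sketch of a two-provider config under the renamed mode follows; the `weight` field and the other `LLMOptions` parameters are illustrative assumptions, not taken from this diff.

```python
# Sketch only: "ab_test" comes from the ModesLiteral change in this diff; weight and
# virtual_key are assumed fields for illustration and may differ in the real SDK.
import portkey
from portkey import Config, LLMOptions

portkey.config = Config(
    mode="ab_test",  # formerly "loadbalance" in ModesLiteral
    llms=[
        LLMOptions(provider="openai", virtual_key="<OPENAI_VIRTUAL_KEY>", weight=0.5),
        LLMOptions(provider="azure-openai", virtual_key="<AZURE_VIRTUAL_KEY>", weight=0.5),
    ],
)

response = portkey.ChatCompletions.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Which provider answered this?"}],
)
```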