From 445564a99bdfdd6791e0799e5f4abbf364d93cbb Mon Sep 17 00:00:00 2001
From: Jakob
Date: Wed, 13 Mar 2024 13:41:11 -0700
Subject: [PATCH] commit

---
 litellm/llms/together_ai.py | 1 +
 litellm/utils.py            | 3 ++-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/litellm/llms/together_ai.py b/litellm/llms/together_ai.py
index 15ed29916ca0..6bed741c3015 100644
--- a/litellm/llms/together_ai.py
+++ b/litellm/llms/together_ai.py
@@ -128,6 +128,7 @@ def completion(
                 optional_params[k] = v

         print_verbose(f"CUSTOM PROMPT DICT: {custom_prompt_dict}; model: {model}")
+        print(f"CUSTOM PROMPT DICT: {custom_prompt_dict}; model: {model}")
         if model in custom_prompt_dict:
             # check if the model has a registered custom prompt
             model_prompt_details = custom_prompt_dict[model]
diff --git a/litellm/utils.py b/litellm/utils.py
index 438d3836cf6d..8c11d093a427 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -3583,7 +3583,7 @@ def cost_per_token(
     model_cost_ref = litellm.model_cost
     model_with_provider = model
     if custom_llm_provider is not None:
-        model_with_provider = model
+        model_with_provider = custom_llm_provider + "/" + model
        if region_name is not None:
            model_with_provider_and_region = (
                f"{custom_llm_provider}/{region_name}/{model}"
@@ -3852,6 +3852,7 @@ def completion_cost(
        if (
            "togethercomputer" in model
            or "together_ai" in model
+           or "together" in model
            or custom_llm_provider == "together_ai"
        ):  # together ai prices based on size of llm