From dc29223d1d23ab836805f661ea316566b0f1f1ef Mon Sep 17 00:00:00 2001
From: Nico Arqueros <1622112+nicarq@users.noreply.github.com>
Date: Wed, 18 Oct 2023 12:59:58 +0200
Subject: [PATCH] fix

---
 scripts/run_node1.sh                               |  2 +-
 .../tests/serialized_agent_conversion_tests.rs     |  2 +-
 src/agent/providers/openai.rs                      | 16 ++++++++++++----
 src/utils/environment.rs                           |  2 +-
 4 files changed, 15 insertions(+), 7 deletions(-)

diff --git a/scripts/run_node1.sh b/scripts/run_node1.sh
index 7dc5a1d39..b79d386a4 100755
--- a/scripts/run_node1.sh
+++ b/scripts/run_node1.sh
@@ -15,7 +15,7 @@ export FIRST_DEVICE_NEEDS_REGISTRATION_CODE="false"
 
 export INITIAL_AGENT_NAME="my_gpt"
 export INITIAL_AGENT_URL="https://api.openai.com"
-export INITIAL_AGENT_MODEL="openai:chatgpt3-turbo"
+export INITIAL_AGENT_MODEL="openai:gpt-3.5-turbo"
 
 # Add these lines to enable all log options
 export LOG_ALL=1
diff --git a/shinkai-libs/shinkai-message-wasm/tests/serialized_agent_conversion_tests.rs b/shinkai-libs/shinkai-message-wasm/tests/serialized_agent_conversion_tests.rs
index b08306932..6acededec 100644
--- a/shinkai-libs/shinkai-message-wasm/tests/serialized_agent_conversion_tests.rs
+++ b/shinkai-libs/shinkai-message-wasm/tests/serialized_agent_conversion_tests.rs
@@ -18,7 +18,7 @@ mod tests {
             "false".to_string(),
             "http://example.com".to_string(),
             "123456".to_string(),
-            "openai:chatgpt3-turbo".to_string(),
+            "openai:gpt-3.5-turbo".to_string(),
             "permission1,permission2".to_string(),
             "bucket1,bucket2".to_string(),
             "sender1,sender2".to_string(),
diff --git a/src/agent/providers/openai.rs b/src/agent/providers/openai.rs
index dbd39c4c6..b8b92bede 100644
--- a/src/agent/providers/openai.rs
+++ b/src/agent/providers/openai.rs
@@ -7,6 +7,7 @@ use serde_json;
 use serde_json::json;
 use serde_json::Value as JsonValue;
 use shinkai_message_primitives::schemas::agents::serialized_agent::OpenAI;
+use shinkai_message_primitives::shinkai_utils::shinkai_logging::{shinkai_log, ShinkaiLogOption, ShinkaiLogLevel};
 use tiktoken_rs::get_chat_completion_max_tokens;
 use tiktoken_rs::num_tokens_from_messages;
 
@@ -103,14 +104,21 @@ impl LLMProvider for OpenAI {
                 .send()
                 .await?;
 
-            eprintln!("Status: {}", res.status());
+            shinkai_log(
+                ShinkaiLogOption::JobExecution,
+                ShinkaiLogLevel::Debug,
+                format!("Call API Status: {:?}", res.status()).as_str(),
+            );
+
             let response_text = res.text().await?;
-            //eprintln!("Response: {:?}", response_text);
+            shinkai_log(
+                ShinkaiLogOption::JobExecution,
+                ShinkaiLogLevel::Debug,
+                format!("Call API Response Text: {:?}", response_text).as_str(),
+            );
 
             let data_resp: Result<JsonValue, _> = serde_json::from_str(&response_text);
-            //eprintln!("data_resp: {:?}", data_resp);
 
-            // let data_resp = res.json::<JsonValue>().await;
             match data_resp {
                 Ok(value) => {
                     let data: Response = serde_json::from_value(value).map_err(AgentError::SerdeError)?;
diff --git a/src/utils/environment.rs b/src/utils/environment.rs
index bdfcb2cdb..06e393ee8 100644
--- a/src/utils/environment.rs
+++ b/src/utils/environment.rs
@@ -36,7 +36,7 @@ pub fn fetch_agent_env(global_identity: String) -> Option<AgentEnvironment> {
     let initial_agent_model: String = env::var("INITIAL_AGENT_MODEL")
         .unwrap_or_else(|_| "".to_string())
         .parse()
-        .expect("Failed to parse agent model e.g. openai:chatgpt3-turbo");
+        .expect("Failed to parse agent model e.g. openai:gpt-3.5-turbo");
 
     if initial_agent_name.is_empty()
         || initial_agent_api_key.is_empty()