From d16135c166de000396b1558bf483b06db7cd654f Mon Sep 17 00:00:00 2001 From: LuoShui Date: Wed, 27 Mar 2024 11:55:07 +0800 Subject: [PATCH] :bug: Fix chat validation about new chatgpt --- crates/openai/src/serve/mod.rs | 14 +++++++----- crates/openai/src/serve/proxy/req.rs | 32 ++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 5 deletions(-) diff --git a/crates/openai/src/serve/mod.rs b/crates/openai/src/serve/mod.rs index 61cf86c5a..fe8ab5008 100644 --- a/crates/openai/src/serve/mod.rs +++ b/crates/openai/src/serve/mod.rs @@ -511,18 +511,22 @@ async fn check_wan_address() { } } - async fn arkos_static(path: Path) -> Result, ResponseError> { let client = with_context!(api_client); let data = client - .get(format!("{}/v2/{}", arkose::Type::GPT4.origin_url(), path.0.as_str())) + .get(format!( + "{}/v2/{}", + arkose::Type::GPT4.origin_url(), + path.0.as_str() + )) .send() .await?; - let mut builder = Response::builder() - .status(data.status()); + let mut builder = Response::builder().status(data.status()); for (key, value) in data.headers().iter() { builder = builder.header(key, value); } let content = data.bytes().await?; - builder.body(content.into()).map_err(ResponseError::InternalServerError) + builder + .body(content.into()) + .map_err(ResponseError::InternalServerError) } diff --git a/crates/openai/src/serve/proxy/req.rs b/crates/openai/src/serve/proxy/req.rs index 79f1f13fd..fd570904a 100644 --- a/crates/openai/src/serve/proxy/req.rs +++ b/crates/openai/src/serve/proxy/req.rs @@ -19,6 +19,7 @@ use super::header_convert; use super::toapi; use crate::serve::error::{ProxyError, ResponseError}; use crate::serve::puid::{get_or_init, reduce_key}; +use crate::URL_CHATGPT_API; #[async_trait] impl SendRequestExt for reqwest::Client { @@ -119,6 +120,15 @@ async fn handle_conv_request(req: &mut RequestExt) -> Result<(), ResponseError> } } + let chat_requirements_token = create_chat_requirements_token(&token).await?; + if let Some(chat_requirements_token) = 
chat_requirements_token { + req.headers.insert( + header::HeaderName::from_static("openai-sentinel-chat-requirements-token"), + header::HeaderValue::from_str(chat_requirements_token.as_str()) + .map_err(ResponseError::BadRequest)?, + ); + } + // Parse model let model = GPTModel::from_str(model).map_err(ResponseError::BadRequest)?; @@ -194,3 +204,25 @@ async fn handle_dashboard_request(req: &mut RequestExt) -> Result<(), ResponseEr Ok(()) } + +async fn create_chat_requirements_token(token: &str) -> Result, ResponseError> { + let token = token.trim_start_matches("Bearer "); + let resp = with_context!(api_client) + .post(format!( + "{URL_CHATGPT_API}/backend-api/sentinel/chat-requirements" + )) + .bearer_auth(token) + .send() + .await + .map_err(ResponseError::InternalServerError)? + .error_for_status() + .map_err(ResponseError::BadRequest)?; + let body = resp.bytes().await?; + let json = serde_json::from_slice::(&body).map_err(ResponseError::BadRequest)?; + if let Some(token_value) = json.get("token") { + if let Some(token_str) = token_value.as_str() { + return Ok(Some(token_str.to_owned())); + } + } + Ok(None) +}