From 8927ec31343cea0ef558a831d55a9a2229964ebb Mon Sep 17 00:00:00 2001 From: LuoShui Date: Wed, 27 Mar 2024 12:27:36 +0800 Subject: [PATCH] :bug: Fix chat validation about new chatgpt --- crates/openai/src/serve/mod.rs | 25 +++++++++++++++++++++++-- crates/openai/src/serve/proxy/mod.rs | 3 +++ crates/openai/src/serve/proxy/req.rs | 10 ++++++---- 3 files changed, 32 insertions(+), 6 deletions(-) diff --git a/crates/openai/src/serve/mod.rs b/crates/openai/src/serve/mod.rs index f54706dd2..fe8ab5008 100644 --- a/crates/openai/src/serve/mod.rs +++ b/crates/openai/src/serve/mod.rs @@ -51,7 +51,7 @@ use std::str::FromStr; use std::sync::Arc; use std::time::{Duration, SystemTime, UNIX_EPOCH}; use tower_http::trace; -use tracing::Level; +use tracing::{Instrument, Level}; use tracing_subscriber::prelude::__tracing_subscriber_SubscriberExt; use tracing_subscriber::util::SubscriberInitExt; @@ -185,7 +185,8 @@ impl Serve { .route("/auth/revoke_token", post(post_revoke_token)) .route("/auth/refresh_session", post(post_refresh_session)) .route("/auth/sess_token", post(post_sess_token)) - .route("/auth/billing", post(post_billing)); + .route("/auth/billing", post(post_billing)) + .route("/v2/*path", any(arkos_static)); let router = router::config( // Enable arkose token endpoint proxy @@ -509,3 +510,23 @@ async fn check_wan_address() { } } } + +async fn arkos_static(path: Path<String>) -> Result<Response<Body>, ResponseError> { + let client = with_context!(api_client); + let data = client + .get(format!( + "{}/v2/{}", + arkose::Type::GPT4.origin_url(), + path.0.as_str() + )) + .send() + .await?; + let mut builder = Response::builder().status(data.status()); + for (key, value) in data.headers().iter() { + builder = builder.header(key, value); + } + let content = data.bytes().await?; + builder + .body(content.into()) + .map_err(ResponseError::InternalServerError) +} diff --git a/crates/openai/src/serve/proxy/mod.rs b/crates/openai/src/serve/proxy/mod.rs index 46b4bfa12..fa2893ac9 100644 ---
a/crates/openai/src/serve/proxy/mod.rs +++ b/crates/openai/src/serve/proxy/mod.rs @@ -93,6 +93,9 @@ pub(crate) fn header_convert( .map_err(ResponseError::InternalServerError)?, ); + h.get("openai-sentinel-chat-requirements-token") + .map(|v| headers.insert("openai-sentinel-chat-requirements-token", v.clone())); + jar.iter() .filter(|c| { let name = c.name().to_lowercase(); diff --git a/crates/openai/src/serve/proxy/req.rs b/crates/openai/src/serve/proxy/req.rs index 5b01d9409..5bea952ea 100644 --- a/crates/openai/src/serve/proxy/req.rs +++ b/crates/openai/src/serve/proxy/req.rs @@ -12,7 +12,7 @@ use serde_json::{json, Value}; use crate::arkose::{ArkoseContext, ArkoseToken, Type}; use crate::constant::{ARKOSE_TOKEN, EMPTY, MODEL, NULL, PUID}; use crate::gpt_model::GPTModel; -use crate::{arkose, info, warn, with_context}; +use crate::{arkose, debug, warn, with_context}; use super::ext::{RequestExt, ResponseExt, SendRequestExt}; use super::header_convert; @@ -127,9 +127,11 @@ async fn handle_conv_request(req: &mut RequestExt) -> Result<(), ResponseError> header::HeaderValue::from_str(chat_requirements_token.as_str()) .map_err(ResponseError::BadRequest)?, ); - info!("Chat requirements token: {}", chat_requirements_token.as_str()) - } - else { + debug!( + "Chat requirements token: {}", + chat_requirements_token.as_str() + ) + } else { warn!("Chat requirements token not found") }