🐛 Fix Arkose Header
luoshuijs committed Apr 1, 2024
1 parent d784741 commit 529e427
Showing 2 changed files with 28 additions and 5 deletions.
5 changes: 5 additions & 0 deletions crates/openai/src/serve/proxy/mod.rs
@@ -93,6 +93,11 @@ pub(crate) fn header_convert(
         .map_err(ResponseError::InternalServerError)?,
     );
 
+    h.get("openai-sentinel-chat-requirements-token")
+        .map(|v| headers.insert("openai-sentinel-chat-requirements-token", v.clone()));
+    h.get("openai-sentinel-arkose-token")
+        .map(|v| headers.insert("openai-sentinel-arkose-token", v.clone()));
+
     jar.iter()
         .filter(|c| {
             let name = c.name().to_lowercase();
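In plain terms, the hunk above forwards the two sentinel headers from the incoming request map (`h`) into the outgoing headers whenever the client supplied them. A minimal standalone sketch of the same pattern, assuming the `http` crate's `HeaderMap` (the free-standing `forward_sentinel_headers` function is illustrative, not part of this commit):

use http::HeaderMap;

// Copy the sentinel headers from the incoming request into the outgoing
// header map, but only when the client actually sent them.
fn forward_sentinel_headers(incoming: &HeaderMap, outgoing: &mut HeaderMap) {
    for name in [
        "openai-sentinel-chat-requirements-token",
        "openai-sentinel-arkose-token",
    ] {
        if let Some(value) = incoming.get(name) {
            outgoing.insert(name, value.clone());
        }
    }
}

Forwarding only when the header is present leaves requests without these headers untouched.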
28 changes: 23 additions & 5 deletions crates/openai/src/serve/proxy/req.rs
@@ -12,7 +12,7 @@ use serde_json::{json, Value};
 use crate::arkose::{ArkoseContext, ArkoseToken, Type};
 use crate::constant::{ARKOSE_TOKEN, EMPTY, MODEL, NULL, PUID};
 use crate::gpt_model::GPTModel;
-use crate::{arkose, info, warn, with_context};
+use crate::{arkose, debug, warn, with_context};
 
 use super::ext::{RequestExt, ResponseExt, SendRequestExt};
 use super::header_convert;
@@ -91,6 +91,8 @@ async fn handle_conv_request(req: &mut RequestExt) -> Result<(), ResponseError>
         .as_object_mut()
         .ok_or(ResponseError::BadRequest(ProxyError::BodyMustBeJsonObject))?;
 
+    debug!("Conversation POST Request Body: {:?}", body);
+
     // If model is not exist, then return error
     let model = body
         .get(MODEL)
@@ -127,9 +129,11 @@ async fn handle_conv_request(req: &mut RequestExt) -> Result<(), ResponseError>
             header::HeaderValue::from_str(chat_requirements_token.as_str())
                 .map_err(ResponseError::BadRequest)?,
         );
-        info!("Chat requirements token: {}", chat_requirements_token.as_str())
-    }
-    else {
+        debug!(
+            "Chat requirements token: {}",
+            chat_requirements_token.as_str()
+        )
+    } else {
         warn!("Chat requirements token not found")
     }
 
@@ -141,7 +145,15 @@ async fn handle_conv_request(req: &mut RequestExt) -> Result<(), ResponseError>
     let condition = match body.get(ARKOSE_TOKEN) {
         Some(s) => {
             let s = s.as_str().unwrap_or(EMPTY);
-            s.is_empty() || s.eq(NULL)
+            let is_empty = s.is_empty() || s.eq(NULL);
+            if !is_empty {
+                req.headers.insert(
+                    header::HeaderName::from_static("openai-sentinel-arkose-token"),
+                    header::HeaderValue::from_str(s).map_err(ResponseError::BadRequest)?,
+                );
+                debug!("Sentinel arkose token: {}", s)
+            }
+            is_empty
         }
         None => true,
     };
@@ -160,6 +172,12 @@ async fn handle_conv_request(req: &mut RequestExt) -> Result<(), ResponseError>
             req.body = Some(Bytes::from(
                 serde_json::to_vec(&json).map_err(ResponseError::BadRequest)?,
            ));
+            req.headers.insert(
+                header::HeaderName::from_static("openai-sentinel-arkose-token"),
+                header::HeaderValue::from_str(arkose_token.value())
+                    .map_err(ResponseError::BadRequest)?,
+            );
+            debug!("Sentinel arkose token: {}", arkose_token.value())
         }
     }
 
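Taken together, the req.rs changes lower the chat-requirements logging from info to debug, log the conversation request body, and mirror the Arkose token into the openai-sentinel-arkose-token header, both when the request body already carries a non-empty token and when a fresh ArkoseToken is obtained. A minimal sketch of the body-to-header mirroring, assuming an `http` `HeaderMap` and a `serde_json` body whose token field is named `arkose_token` (the field name and `mirror_arkose_token` function are illustrative; the real code uses the `ARKOSE_TOKEN` constant and `RequestExt`):

use http::{header::HeaderValue, HeaderMap};
use serde_json::Value;

// If the JSON body carries a non-empty arkose token, mirror it into the
// sentinel header so the upstream request carries it in both places.
fn mirror_arkose_token(body: &Value, headers: &mut HeaderMap) {
    if let Some(token) = body.get("arkose_token").and_then(Value::as_str) {
        if !token.is_empty() && token != "null" {
            if let Ok(value) = HeaderValue::from_str(token) {
                headers.insert("openai-sentinel-arkose-token", value);
            }
        }
    }
}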
