chore: search and chat (#999)
* chore: add test for search and chat

* chore: update test

* chore: update test

* chore: update ci

* chore: fix security audit

* chore: multiple core docker build

* chore: multiple core docker build

* chore: update ci

* chore: update model setting

* chore: test ci

* chore: use tiktoken to calculate token length (sketched below the commit list)

* chore: remove env

* chore: use spawn_blocking with condition (sketched below the commit list)

* chore: docs

* chore: clippy

* chore: clippy

* chore: docker logs

* chore: pass message id

* chore: clippy
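Two of the commits above describe techniques worth illustrating. For the tiktoken commit, here is a minimal sketch of token counting with the tiktoken-rs crate; the encoder choice and the split threshold below are assumptions, not necessarily what this commit uses:

```rust
// Hedged sketch: measure content length in tokens with tiktoken-rs so that
// document content can be split by token count rather than by characters.
// cl100k_base() and encode_with_special_tokens() are tiktoken-rs APIs;
// the 8000-token cutoff is an assumed value.
use tiktoken_rs::cl100k_base;

fn token_len(content: &str) -> usize {
    let bpe = cl100k_base().expect("load cl100k_base tokenizer");
    bpe.encode_with_special_tokens(content).len()
}

fn needs_split(content: &str) -> bool {
    const MAX_TOKENS: usize = 8000; // assumed cutoff
    token_len(content) > MAX_TOKENS
}
```

For the spawn_blocking commit, here is a sketch of conditionally offloading CPU-heavy work to Tokio's blocking pool; the threshold and the inner function are hypothetical:

```rust
// Hedged sketch: only pay the spawn_blocking overhead for large payloads;
// small payloads run inline on the async task.
use tokio::task::spawn_blocking;

async fn count_tokens(content: String) -> usize {
    const SPAWN_BLOCKING_THRESHOLD: usize = 8000; // assumed cutoff
    if content.len() > SPAWN_BLOCKING_THRESHOLD {
        spawn_blocking(move || expensive_token_count(&content))
            .await
            .expect("blocking task panicked")
    } else {
        expensive_token_count(&content)
    }
}

fn expensive_token_count(content: &str) -> usize {
    // Placeholder for the real CPU-bound work (e.g. tokenization).
    content.split_whitespace().count()
}
```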
appflowy authored Nov 16, 2024
1 parent d0c212f commit 655f13b
Showing 32 changed files with 969 additions and 365 deletions.
23 changes: 10 additions & 13 deletions .github/workflows/integration_test.yml
@@ -105,6 +105,12 @@ jobs:
# the wasm-pack headless tests will run on random ports, so we need to allow all origins
run: sed -i 's/http:\/\/127\.0\.0\.1:8000/http:\/\/127.0.0.1/g' nginx/nginx.conf

- name: Log in to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_HUB_USERNAME }}
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}

- name: Run Docker-Compose
run: |
export APPFLOWY_WORKER_VERSION=${GITHUB_SHA}
@@ -113,29 +119,20 @@
docker compose -f docker-compose-ci.yml up -d
docker ps -a
container_id=$(docker ps --filter name=appflowy-cloud-ai-1 -q)
if [ -n "$container_id" ]; then
echo "Displaying logs for the AppFlowy-AI container..."
docker logs "$container_id"
else
echo "No running container found to display logs."
fi
- name: Install prerequisites
run: |
sudo apt-get update
sudo apt-get install protobuf-compiler
sudo apt-get install -y protobuf-compiler
- name: Run Tests
run: |
echo "Running tests for ${{ matrix.test_service }} with flags: ${{ matrix.test_cmd }}"
RUST_LOG="info" DISABLE_CI_TEST_LOG="true" cargo test ${{ matrix.test_cmd }}
- name: Run Tests from main branch
- name: Docker Logs
if: always()
run: |
git fetch origin main
git checkout main
RUST_LOG="info" DISABLE_CI_TEST_LOG="true" cargo test ${{ matrix.test_cmd }}
docker logs appflowy-cloud-ai-1
cleanup:
name: Cleanup Docker Images
1 change: 1 addition & 0 deletions .github/workflows/push_latest_docker.yml
@@ -95,6 +95,7 @@ jobs:
labels: ${{ steps.meta.outputs.labels }}
provenance: false
build-args: |
PROFILE=release
FEATURES=
- name: Logout from Docker Hub


47 changes: 32 additions & 15 deletions Cargo.lock


6 changes: 6 additions & 0 deletions Cargo.toml
@@ -301,6 +301,11 @@ codegen-units = 1
inherits = "release"
debug = true

[profile.ci]
inherits = "release"
opt-level = 2
lto = false # Disable Link-Time Optimization

[patch.crates-io]
# It's difficult to resolve different versions of the same crate used in the AppFlowy Frontend and the Client-API crate.
# So we use a patch to work around this issue.
@@ -314,4 +319,5 @@ collab-importer = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev

[features]
history = []
# Some AI test features are not available for self-hosted AppFlowy Cloud. Therefore, AI testing is disabled by default.
ai-test-enabled = ["client-api-test/ai-test-enabled"]
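The new ai-test-enabled feature is typically consumed by gating tests at compile time; a minimal hedged sketch follows (the test name and body are hypothetical), run with cargo test --features ai-test-enabled:

```rust
// Hypothetical test gated behind the ai-test-enabled cargo feature.
// It only compiles when the feature is active, so self-hosted CI runs
// without the premium AI service skip it entirely.
#[cfg(feature = "ai-test-enabled")]
#[tokio::test]
async fn ai_chat_smoke_test() {
    // Placeholder: real tests would exercise the AI chat endpoints here.
    let question = "What is AppFlowy?";
    assert!(!question.is_empty());
}
```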
7 changes: 5 additions & 2 deletions Dockerfile
@@ -16,16 +16,19 @@ RUN apt update && apt install -y protobuf-compiler lld clang

# Specify a default value for FEATURES; it could be an empty string if no features are enabled by default
ARG FEATURES=""
ARG PROFILE="release"

COPY --from=planner /app/recipe.json recipe.json
# Build our project dependencies
ENV CARGO_BUILD_JOBS=4
RUN cargo chef cook --release --recipe-path recipe.json

COPY . .
ENV SQLX_OFFLINE true

# Build the project
RUN echo "Building with features: ${FEATURES}"
RUN cargo build --profile=release --features "${FEATURES}" --bin appflowy_cloud
RUN echo "Building with profile: ${PROFILE}, features: ${FEATURES}, "
RUN cargo build --profile=${PROFILE} --features "${FEATURES}" --bin appflowy_cloud

FROM debian:bookworm-slim AS runtime
WORKDIR /app
2 changes: 1 addition & 1 deletion deny.toml
@@ -1,2 +1,2 @@
[advisories]
ignore = ["RUSTSEC-2024-0370"]
ignore = ["RUSTSEC-2024-0370", "RUSTSEC-2024-0384"]
1 change: 1 addition & 0 deletions dev.env
@@ -4,6 +4,7 @@ APPFLOWY_DATABASE_URL=postgres://postgres:password@localhost:5432/postgres
APPFLOWY_ACCESS_CONTROL=true
APPFLOWY_WEBSOCKET_MAILBOX_SIZE=6000
APPFLOWY_DATABASE_MAX_CONNECTIONS=40
APPFLOWY_DOCUMENT_CONTENT_SPLIT_LEN=8000

# This file is used to set the environment variables for local development
# Copy this file to .env and change the values as needed
4 changes: 3 additions & 1 deletion docker-compose-ci.yml
@@ -120,6 +120,7 @@ services:
dockerfile: Dockerfile
args:
FEATURES: ""
PROFILE: ci
image: appflowyinc/appflowy_cloud:${APPFLOWY_CLOUD_VERSION:-latest}

admin_frontend:
@@ -138,7 +139,7 @@

ai:
restart: on-failure
image: appflowyinc/appflowy_ai:${APPFLOWY_AI_VERSION:-latest}
image: appflowyinc/appflowy_ai_premium:${APPFLOWY_AI_VERSION:-latest}
ports:
- "5001:5001"
environment:
@@ -147,6 +148,7 @@
- LOCAL_AI_AWS_SECRET_ACCESS_KEY=${LOCAL_AI_AWS_SECRET_ACCESS_KEY}
- APPFLOWY_AI_SERVER_PORT=${APPFLOWY_AI_SERVER_PORT}
- APPFLOWY_AI_DATABASE_URL=${APPFLOWY_AI_DATABASE_URL}
- APPFLOWY_AI_REDIS_URL=${APPFLOWY_REDIS_URI}

appflowy_worker:
restart: on-failure
3 changes: 1 addition & 2 deletions libs/app-error/src/lib.rs
@@ -3,7 +3,6 @@ pub mod gotrue;

#[cfg(feature = "gotrue_error")]
use crate::gotrue::GoTrueError;
use std::error::Error as StdError;
use std::string::FromUtf8Error;

#[cfg(feature = "appflowy_ai_error")]
@@ -92,7 +91,7 @@ pub enum AppError {
#[error("{desc}: {err}")]
SqlxArgEncodingError {
desc: String,
err: Box<dyn StdError + 'static + Send + Sync>,
err: Box<dyn std::error::Error + 'static + Send + Sync>,
},

#[cfg(feature = "validation_error")]
34 changes: 30 additions & 4 deletions libs/appflowy-ai-client/src/client.rs
@@ -1,8 +1,9 @@
use crate::dto::{
AIModel, ChatAnswer, ChatQuestion, CompleteTextResponse, CompletionType, CreateChatContext,
CustomPrompt, Document, EmbeddingRequest, EmbeddingResponse, LocalAIConfig, MessageData,
RepeatedLocalAIPackage, RepeatedRelatedQuestion, SearchDocumentsRequest, SummarizeRowResponse,
TranslateRowData, TranslateRowResponse,
AIModel, CalculateSimilarityParams, ChatAnswer, ChatQuestion, CompleteTextResponse,
CompletionType, CreateChatContext, CustomPrompt, Document, EmbeddingRequest, EmbeddingResponse,
LocalAIConfig, MessageData, RepeatedLocalAIPackage, RepeatedRelatedQuestion,
SearchDocumentsRequest, SimilarityResponse, SummarizeRowResponse, TranslateRowData,
TranslateRowResponse,
};
use crate::error::AIError;

@@ -202,6 +203,7 @@ impl AppFlowyAIClient {
pub async fn send_question(
&self,
chat_id: &str,
question_id: i64,
content: &str,
model: &AIModel,
metadata: Option<Value>,
@@ -211,6 +213,8 @@
data: MessageData {
content: content.to_string(),
metadata,
rag_ids: vec![],
message_id: Some(question_id.to_string()),
},
};
let url = format!("{}/chat/message", self.url);
@@ -230,13 +234,16 @@
chat_id: &str,
content: &str,
metadata: Option<Value>,
rag_ids: Vec<String>,
model: &AIModel,
) -> Result<impl Stream<Item = Result<Bytes, AIError>>, AIError> {
let json = ChatQuestion {
chat_id: chat_id.to_string(),
data: MessageData {
content: content.to_string(),
metadata,
rag_ids,
message_id: None,
},
};
let url = format!("{}/chat/message/stream", self.url);
@@ -253,15 +260,19 @@
pub async fn stream_question_v2(
&self,
chat_id: &str,
question_id: i64,
content: &str,
metadata: Option<Value>,
rag_ids: Vec<String>,
model: &AIModel,
) -> Result<impl Stream<Item = Result<Bytes, AIError>>, AIError> {
let json = ChatQuestion {
chat_id: chat_id.to_string(),
data: MessageData {
content: content.to_string(),
metadata,
rag_ids,
message_id: Some(question_id.to_string()),
},
};
let url = format!("{}/v2/chat/message/stream", self.url);
@@ -323,6 +334,21 @@ impl AppFlowyAIClient {
.into_data()
}

pub async fn calculate_similarity(
&self,
params: CalculateSimilarityParams,
) -> Result<SimilarityResponse, AIError> {
let url = format!("{}/similarity", self.url);
let resp = self
.http_client(Method::POST, &url)?
.json(&params)
.send()
.await?;
AIResponse::<SimilarityResponse>::from_response(resp)
.await?
.into_data()
}

fn http_client(&self, method: Method, url: &str) -> Result<RequestBuilder, AIError> {
let request_builder = self.client.request(method, url);
Ok(request_builder)
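A hedged usage sketch for the new calculate_similarity endpoint follows. The hunk above does not show the DTO definitions, so every field name below (workspace_id, input, expected, use_embedding, score) is an assumption; check appflowy_ai_client::dto for the real layout:

```rust
// Hedged sketch: compare an AI answer against an expected string through
// the new /similarity endpoint exposed by calculate_similarity().
// Field names on the request and response structs are assumptions.
use appflowy_ai_client::client::AppFlowyAIClient;
use appflowy_ai_client::dto::CalculateSimilarityParams;
use appflowy_ai_client::error::AIError;

async fn assert_similar(
    client: &AppFlowyAIClient,
    workspace_id: String,
    answer: &str,
    expected: &str,
    threshold: f64,
) -> Result<(), AIError> {
    let params = CalculateSimilarityParams {
        workspace_id,                   // assumed field
        input: answer.to_string(),      // assumed field
        expected: expected.to_string(), // assumed field
        use_embedding: true,            // assumed field
    };
    let resp = client.calculate_similarity(params).await?;
    // score is an assumed field on SimilarityResponse.
    assert!(
        resp.score >= threshold,
        "similarity {} is below threshold {}",
        resp.score,
        threshold
    );
    Ok(())
}
```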