Commit

chore: imported workspace should not become the latest visiting workspace (#890)

* chore: imported workspace should not become the latest visiting workspace

* chore: sqlx files

* chore: update test

* chore: fmt
appflowy authored Oct 16, 2024
1 parent d3e4a68 commit 9942e68
Showing 5 changed files with 121 additions and 24 deletions.

Some generated files are not rendered by default.

38 changes: 34 additions & 4 deletions libs/database/src/workspace.rs
@@ -1,3 +1,4 @@
use chrono::{DateTime, Utc};
use database_entity::dto::{
AFRole, AFWorkspaceInvitation, AFWorkspaceInvitationStatus, AFWorkspaceSettings, GlobalComment,
PublishInfo, Reaction,
@@ -622,11 +623,16 @@ pub async fn select_user_profile<'a, E: Executor<'a, Database = Postgres>>(
af_user_row.deleted_at,
af_user_row.updated_at,
af_user_row.created_at,
(SELECT workspace_id
(
SELECT af_workspace_member.workspace_id
FROM af_workspace_member
WHERE uid = af_user_row.uid
ORDER BY updated_at DESC
LIMIT 1) as latest_workspace_id
JOIN af_workspace
ON af_workspace_member.workspace_id = af_workspace.workspace_id
WHERE af_workspace_member.uid = af_user_row.uid
AND COALESCE(af_workspace.is_initialized, true) = true
ORDER BY af_workspace_member.updated_at DESC
LIMIT 1
) AS latest_workspace_id
FROM af_user_row
"#,
user_uuid
@@ -708,6 +714,30 @@ pub async fn update_updated_at_of_workspace<'a, E: Executor<'a, Database = Postgres>>(
Ok(())
}

#[inline]
pub async fn update_updated_at_of_workspace_with_uid<'a, E: Executor<'a, Database = Postgres>>(
executor: E,
uid: i64,
workspace_id: &Uuid,
current_timestamp: DateTime<Utc>,
) -> Result<(), AppError> {
sqlx::query!(
r#"
UPDATE af_workspace_member
SET updated_at = $3
WHERE uid = $1
AND workspace_id = $2;
"#,
uid,
workspace_id,
current_timestamp
)
.execute(executor)
.await?;

Ok(())
}

/// Returns a list of workspaces that the user is part of.
/// The user may be an owner or a non-owner.
#[inline]
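For illustration only — not part of the commit — a minimal sketch of calling the new `update_updated_at_of_workspace_with_uid` helper with a plain `&PgPool` executor, assuming the `database`, `chrono`, `sqlx`, `uuid`, and `anyhow` crates already used in this repository. The real call site sits inside the import worker's transaction in the next file.

```rust
use chrono::{DateTime, Utc};
use database::workspace::update_updated_at_of_workspace_with_uid;
use sqlx::PgPool;
use uuid::Uuid;

// Backdate the imported workspace's member row to the Unix epoch so it never
// wins the "latest visited workspace" ordering in select_user_profile.
async fn hide_imported_workspace_from_recents(
    pg_pool: &PgPool,
    uid: i64,
    workspace_id: &Uuid,
) -> anyhow::Result<()> {
    let epoch = DateTime::from_timestamp(0, 0).unwrap_or_else(Utc::now);
    update_updated_at_of_workspace_with_uid(pg_pool, uid, workspace_id, epoch)
        .await
        .map_err(|err| anyhow::anyhow!("failed to backdate imported workspace: {err:?}"))
}
```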
26 changes: 23 additions & 3 deletions services/appflowy-worker/src/import_worker/worker.rs
@@ -23,7 +23,7 @@ use database::collab::{insert_into_af_collab_bulk_for_user, select_blob_from_af_
use database::resource_usage::{insert_blob_metadata_bulk, BulkInsertMeta};
use database::workspace::{
delete_from_workspace, select_workspace_database_storage_id, update_import_task_status,
update_workspace_status,
update_updated_at_of_workspace_with_uid, update_workspace_status,
};
use database_entity::dto::CollabParams;

@@ -41,6 +41,7 @@ use redis::{AsyncCommands, RedisResult, Value};
use serde::{Deserialize, Serialize};
use serde_json::from_str;
use sqlx::types::chrono;
use sqlx::types::chrono::{DateTime, Utc};
use sqlx::{PgPool, Pool, Postgres};
use std::collections::HashMap;
use std::env::temp_dir;
@@ -646,6 +647,8 @@ async fn process_unzip_file(
);
collab_params_list.push(folder_collab_params);

let upload_resources = process_resources(resources).await;

// 6. Start a transaction to insert all collabs
let mut transaction = pg_pool.begin().await.map_err(|err| {
ImportError::Internal(anyhow!(
@@ -700,7 +703,24 @@ async fn process_unzip_file(
err
))
})?;
let upload_resources = process_resources(resources).await;

// Set the workspace's updated_at to the earliest possible timestamp, as it is created by an import task
// and not actively updated by a user. This ensures that when sorting workspaces by updated_at to find
// the most recent, the imported workspace doesn't appear as the most recently visited workspace.
let updated_at = DateTime::from_timestamp(0, 0).unwrap_or_else(Utc::now);
update_updated_at_of_workspace_with_uid(
transaction.deref_mut(),
import_task.uid,
&workspace_id,
updated_at,
)
.await
.map_err(|err| {
ImportError::Internal(anyhow!(
"Failed to update workspace updated_at when importing data: {:?}",
err
))
})?;

// insert metadata into database
let metas = upload_resources
@@ -747,7 +767,7 @@ async fn process_unzip_file(
.await
.map_err(|err| ImportError::Internal(anyhow!("Failed to upload files to S3: {:?}", err)))?;

// 3. delete zip file regardless of success or failure
// 8. delete zip file regardless of success or failure
match fs::remove_dir_all(unzip_dir_path).await {
Ok(_) => trace!("[Import]: {} deleted unzip file", import_task.workspace_id),
Err(err) => error!("Failed to delete unzip file: {:?}", err),
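A self-contained sketch (chrono 0.4 assumed; not code from this commit) of why the backdating above works: any workspace the user has actually opened carries a later `updated_at` than the Unix epoch, so it always wins the `ORDER BY updated_at DESC` in the profile query.

```rust
use chrono::{DateTime, Utc};

fn main() {
    // The imported workspace's member row is pinned to 1970-01-01T00:00:00Z.
    let imported = DateTime::from_timestamp(0, 0).unwrap_or_else(Utc::now);
    // Any workspace the user actually visited has a recent timestamp.
    let visited = Utc::now();

    // Emulate `ORDER BY updated_at DESC LIMIT 1` from select_user_profile.
    let mut rows = vec![("imported", imported), ("visited", visited)];
    rows.sort_by(|a, b| b.1.cmp(&a.1));
    assert_eq!(rows[0].0, "visited");
    println!("latest_workspace_id would resolve to: {}", rows[0].0);
}
```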
61 changes: 46 additions & 15 deletions tests/workspace/import_test.rs
@@ -4,10 +4,11 @@ use collab_folder::ViewLayout;
use shared_entity::dto::import_dto::ImportTaskStatus;
use std::path::PathBuf;
use std::time::Duration;

#[tokio::test]
async fn import_blog_post_test() {
// Step 1: Import the blog post zip
let (client, imported_workspace_id) = import_zip("blog_post.zip").await;
let (client, imported_workspace_id) = import_notion_zip_until_complete("blog_post.zip").await;

// Step 2: Fetch the folder and views
let folder = client.get_folder(&imported_workspace_id).await;
@@ -77,7 +78,7 @@ async fn import_blog_post_test() {

#[tokio::test]
async fn import_project_and_task_zip_test() {
let (client, imported_workspace_id) = import_zip("project&task.zip").await;
let (client, imported_workspace_id) = import_notion_zip_until_complete("project&task.zip").await;
let folder = client.get_folder(&imported_workspace_id).await;
let workspace_database = client.get_workspace_database(&imported_workspace_id).await;
let space_views = folder.get_views_belong_to(&imported_workspace_id);
@@ -148,9 +149,35 @@ async fn import_project_and_task_zip_test() {
}
}

async fn import_zip(name: &str) -> (TestClient, String) {
#[tokio::test]
async fn imported_workspace_do_not_become_latest_visit_workspace_test() {
let client = TestClient::new_user().await;
let file_path = PathBuf::from("tests/workspace/asset/blog_post.zip".to_string());
client.api_client.import_file(&file_path).await.unwrap();

// When importing a Notion file, a new task is spawned to create a workspace for the imported data.
// However, the workspace should remain hidden until the import is completed successfully.
let user_workspace = client.get_user_workspace_info().await;
let visiting_workspace_id = user_workspace.visiting_workspace.workspace_id;
assert_eq!(user_workspace.workspaces.len(), 1);
assert_eq!(
user_workspace.visiting_workspace.workspace_id,
user_workspace.workspaces[0].workspace_id
);

wait_until_import_complete(&client).await;

// after the import completes, the imported workspace should be visible
let user_workspace = client.get_user_workspace_info().await;
assert_eq!(user_workspace.workspaces.len(), 2);
assert_eq!(
user_workspace.visiting_workspace.workspace_id,
visiting_workspace_id,
);
}

async fn import_notion_zip_until_complete(name: &str) -> (TestClient, String) {
let client = TestClient::new_user().await;
let file_path = PathBuf::from(format!("tests/workspace/asset/{name}"));
client.api_client.import_file(&file_path).await.unwrap();
let default_workspace_id = client.workspace_id().await;
@@ -164,6 +191,22 @@ async fn import_zip(name: &str) -> (TestClient, String) {
assert_eq!(tasks.len(), 1);
assert_eq!(tasks[0].status, ImportTaskStatus::Pending);

wait_until_import_complete(&client).await;

// after the import task is completed, the new workspace should be visible
let workspaces = client.api_client.get_workspaces().await.unwrap();
assert_eq!(workspaces.len(), 2);

let imported_workspace = workspaces
.into_iter()
.find(|workspace| workspace.workspace_id.to_string() != default_workspace_id)
.expect("Failed to find imported workspace");

let imported_workspace_id = imported_workspace.workspace_id.to_string();
(client, imported_workspace_id)
}

async fn wait_until_import_complete(client: &TestClient) {
let mut task_completed = false;
let max_retries = 12;
let mut retries = 0;
@@ -182,16 +225,4 @@ async fn import_zip(name: &str) -> (TestClient, String) {
task_completed,
"The import task was not completed within the expected time."
);

// after the import task is completed, the new workspace should be visible
let workspaces = client.api_client.get_workspaces().await.unwrap();
assert_eq!(workspaces.len(), 2);

let imported_workspace = workspaces
.into_iter()
.find(|workspace| workspace.workspace_id.to_string() != default_workspace_id)
.expect("Failed to find imported workspace");

let imported_workspace_id = imported_workspace.workspace_id.to_string();
(client, imported_workspace_id)
}
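For readers without the elided test lines: `wait_until_import_complete` follows a plain bounded-polling pattern. The sketch below is a generic stand-in rather than the exact test code; `poll_finished` is a hypothetical closure representing whatever client call reports that the import task has completed, and the sleep interval is illustrative.

```rust
use std::time::Duration;

// Generic bounded polling: keep asking until the task reports completion,
// giving up (and failing the test) after `max_retries` attempts.
async fn wait_until<F, Fut>(mut poll_finished: F, max_retries: u32)
where
    F: FnMut() -> Fut,
    Fut: std::future::Future<Output = bool>,
{
    let mut retries = 0;
    loop {
        if poll_finished().await {
            return;
        }
        retries += 1;
        assert!(
            retries <= max_retries,
            "The import task was not completed within the expected time."
        );
        tokio::time::sleep(Duration::from_secs(10)).await;
    }
}
```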
