Commit 3e60174

fix: clippy warnings and errors
aaryanpunia authored and cdxker committed Jul 9, 2024
1 parent 60c63fe commit 3e60174
Showing 4 changed files with 7 additions and 12 deletions.
1 change: 1 addition & 0 deletions server/src/bin/redoc_ci.rs
@@ -1,6 +1,7 @@
 use trieve_server::ApiDoc;
 use utoipa::OpenApi;
 
+#[allow(clippy::print_stdout)]
 fn main() -> std::io::Result<()> {
     println!("{}", ApiDoc::openapi().to_pretty_json().unwrap());
     Ok(())
2 changes: 1 addition & 1 deletion server/src/bin/sync-qdrant.rs
@@ -9,7 +9,7 @@ use trieve_server::{
         },
     },
 };
-
+#[allow(clippy::print_stdout)]
 #[tokio::main]
 async fn main() -> Result<(), ServiceError> {
     dotenvy::dotenv().ok();
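
Note: both binaries above gate their use of println! behind #[allow(clippy::print_stdout)]. A minimal sketch of how that restriction lint and the allow attribute interact, assuming a crate that denies the lint (the print_report function below is hypothetical, not from this repository):

#![deny(clippy::print_stdout)]

// Exempted: the attribute silences the crate-level deny for this item only.
#[allow(clippy::print_stdout)]
fn print_report(name: &str, count: usize) {
    println!("{name}: {count}");
}

fn main() {
    print_report("chunks", 42);
    // A bare println! here would be rejected by the deny above.
}
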
12 changes: 3 additions & 9 deletions server/src/operators/chunk_operator.rs
@@ -72,7 +72,7 @@ pub async fn get_chunk_metadatas_from_point_ids(
     let chunk_metadatas = chunk_metadata_pairs
         .into_iter()
         .map(|(table, tag_set)| {
-            ChunkMetadata::from_table_and_tag_set(table, tag_set.unwrap_or(vec![]))
+            ChunkMetadata::from_table_and_tag_set(table, tag_set.unwrap_or_default())
         })
         .collect();
 
@@ -623,15 +623,9 @@ pub async fn bulk_insert_chunk_metadata_query(
             .chunk_metadata
             .tag_set
             .clone()
-            .unwrap_or(vec![])
+            .unwrap_or_default()
             .iter()
-            .filter_map(|maybe_tag| {
-                if let Some(tag) = maybe_tag {
-                    Some(tag.clone())
-                } else {
-                    None
-                }
-            })
+            .filter_map(|maybe_tag| maybe_tag.clone())
             .collect_vec();
         let chunk_metadata =
             ChunkMetadata::from_table_and_tag_set(chunk_metadata_table, tag_set);
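
Note: the two chunk_operator.rs changes follow common clippy-style simplifications: unwrap_or_default() instead of spelling out the fallback with unwrap_or(vec![]), and a filter_map closure that returns the Option directly instead of re-wrapping it with if let. A minimal standalone sketch under assumed types (Option<Vec<Option<String>>> for the tag set; std collect() stands in for itertools' collect_vec()); the collect_tags function is hypothetical, not from this repository:

fn collect_tags(tag_set: Option<Vec<Option<String>>>) -> Vec<String> {
    tag_set
        .unwrap_or_default() // same result as .unwrap_or(vec![]), stated more directly
        .iter()
        .filter_map(|maybe_tag| maybe_tag.clone()) // None entries are dropped, Some values are cloned out
        .collect()
}

fn main() {
    let tags = collect_tags(Some(vec![Some("news".into()), None, Some("tech".into())]));
    assert_eq!(tags, vec!["news".to_string(), "tech".to_string()]);
}
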
4 changes: 2 additions & 2 deletions server/src/operators/dataset_operator.rs
@@ -344,7 +344,7 @@ pub async fn delete_chunks_in_dataset(
                 log::error!("Failed to create event: {:?}", err);
             });
 
-        println!("Deleted {} chunks from {}", chunk_ids.len(), id);
+        log::info!("Deleted {} chunks from {}", chunk_ids.len(), id);
 
         // Move to the next batch
         last_offset_id = *chunk_ids.last().unwrap();
@@ -353,7 +353,7 @@ pub async fn delete_chunks_in_dataset(
     Ok(())
 }
 
-#[tracing::instrument(skip(pool, clickhouse_client))]
+#[tracing::instrument(skip_all)]
 pub async fn delete_dataset_by_id_query(
     id: uuid::Uuid,
     pool: web::Data<Pool>,
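
Note: the dataset_operator.rs changes route the batch-progress message through the log facade instead of stdout and switch the tracing span to skip_all, which records the span without capturing any function arguments. A small sketch, assuming the log and tracing crates as dependencies (delete_batch and its parameters are hypothetical, not from this repository):

#[tracing::instrument(skip_all)] // keep the span, record none of the arguments
fn delete_batch(chunk_ids: &[u64], dataset_id: &str) {
    // Goes through the logging facade rather than stdout, so output honors
    // whatever logger and level filter the binary configures.
    log::info!("Deleted {} chunks from {}", chunk_ids.len(), dataset_id);
}

fn main() {
    // With no logger installed, log::info! is a no-op; the call still compiles and runs.
    delete_batch(&[1, 2, 3], "dataset-123");
}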
