Skip to content

Commit

Permalink
cleanup: apply cargo clippy suggestions and fix Redoc schema registration
Browse files Browse the repository at this point in the history
  • Loading branch information
densumesh authored and cdxker committed May 10, 2024
1 parent 5e3917c commit 17c0f65
Show file tree
Hide file tree
Showing 9 changed files with 38 additions and 88 deletions.
4 changes: 2 additions & 2 deletions server/src/bin/ingestion-worker.rs
Original file line number Diff line number Diff line change
Expand Up @@ -439,7 +439,7 @@ pub async fn bulk_upload_chunks(
metadata: message.chunk.metadata.clone(),
tracking_id: chunk_tracking_id,
time_stamp: timestamp,
location: message.chunk.location.clone(),
location: message.chunk.location,
dataset_id: payload.dataset_id,
weight: message.chunk.weight.unwrap_or(0.0),
};
Expand Down Expand Up @@ -658,7 +658,7 @@ async fn upload_chunk(
metadata: payload.chunk.metadata.clone(),
tracking_id: chunk_tracking_id,
time_stamp: timestamp,
location: payload.chunk.location.clone(),
location: payload.chunk.location,
dataset_id: payload.ingest_specific_chunk_metadata.dataset_id,
weight: payload.chunk.weight.unwrap_or(0.0),
};
Expand Down
10 changes: 5 additions & 5 deletions server/src/data/models.rs
Original file line number Diff line number Diff line change
Expand Up @@ -224,9 +224,9 @@ pub enum GeoTypes {
Float(f64),
}

impl Into<f64> for GeoTypes {
fn into(self) -> f64 {
match self {
impl From<GeoTypes> for f64 {
fn from(val: GeoTypes) -> Self {
match val {
GeoTypes::Int(i) => i as f64,
GeoTypes::Float(f) => f,
}
Expand Down Expand Up @@ -2063,7 +2063,7 @@ impl QdrantPayload {
time_stamp: chunk_metadata.time_stamp.map(|x| x.timestamp()),
dataset_id: dataset_id.unwrap_or(chunk_metadata.dataset_id),
content: convert_html_to_text(&chunk_metadata.chunk_html.unwrap_or_default()),
group_ids: group_ids,
group_ids,
location: chunk_metadata.location,
}
}
Expand Down Expand Up @@ -2096,7 +2096,7 @@ impl QdrantPayload {
.as_str()
.map(|s| uuid::Uuid::parse_str(s).unwrap())
.unwrap_or_default(),
group_ids: group_ids,
group_ids,
content: point
.payload
.get("content")
Expand Down
13 changes: 2 additions & 11 deletions server/src/handlers/chunk_handler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -558,7 +558,7 @@ pub async fn update_chunk(
})
.transpose()?
.or(chunk_metadata.time_stamp),
update_chunk_data.location.clone(),
update_chunk_data.location,
dataset_id,
update_chunk_data.weight.unwrap_or(1.0),
);
Expand Down Expand Up @@ -1011,9 +1011,6 @@ pub async fn search_chunks(
dataset_org_plan_sub.dataset.server_configuration.clone(),
);

let page = data.page.unwrap_or(1);
let get_total_pages = data.get_total_pages.unwrap_or(false);

let parsed_query = parse_query(data.query.clone());

let tx_ctx = sentry::TransactionContext::new("search", "search_chunks");
Expand All @@ -1033,8 +1030,6 @@ pub async fn search_chunks(
search_full_text_chunks(
data.clone(),
parsed_query,
page,
get_total_pages,
pool,
dataset_org_plan_sub.dataset,
server_dataset_config,
Expand All @@ -1046,8 +1041,6 @@ pub async fn search_chunks(
search_hybrid_chunks(
data.clone(),
parsed_query,
page,
get_total_pages,
pool,
dataset_org_plan_sub.dataset,
server_dataset_config,
Expand All @@ -1059,8 +1052,6 @@ pub async fn search_chunks(
search_semantic_chunks(
data.clone(),
parsed_query,
page,
get_total_pages,
pool,
dataset_org_plan_sub.dataset,
server_dataset_config,
Expand Down Expand Up @@ -1503,7 +1494,7 @@ pub async fn get_recommended_chunks(

slim_chunks
.into_iter()
.map(|chunk| ChunkMetadata::from(chunk))
.map(ChunkMetadata::from)
.collect::<Vec<ChunkMetadata>>()
}
_ => get_chunk_metadatas_from_point_ids(
Expand Down
17 changes: 0 additions & 17 deletions server/src/handlers/group_handler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1192,8 +1192,6 @@ pub async fn search_within_group(
);

//search over the links as well
let page = data.page.unwrap_or(1);
let get_total_pages = data.get_total_pages.unwrap_or(false);
let group_id = data.group_id;
let dataset_id = dataset_org_plan_sub.dataset.id;
let search_pool = pool.clone();
Expand Down Expand Up @@ -1226,8 +1224,6 @@ pub async fn search_within_group(
data.clone(),
parsed_query,
group,
page,
get_total_pages,
search_pool,
dataset_org_plan_sub.dataset,
server_dataset_config,
Expand All @@ -1239,8 +1235,6 @@ pub async fn search_within_group(
data.clone(),
parsed_query,
group,
page,
get_total_pages,
search_pool,
dataset_org_plan_sub.dataset,
server_dataset_config,
Expand All @@ -1252,8 +1246,6 @@ pub async fn search_within_group(
data.clone(),
parsed_query,
group,
page,
get_total_pages,
search_pool,
dataset_org_plan_sub.dataset,
server_dataset_config,
Expand Down Expand Up @@ -1341,9 +1333,6 @@ pub async fn search_over_groups(
dataset_org_plan_sub.dataset.server_configuration.clone(),
);

let page = data.page.unwrap_or(1);
let get_total_pages = data.get_total_pages.unwrap_or(false);

let parsed_query = parse_query(data.query.clone());

let mut timer = Timer::new();
Expand All @@ -1360,8 +1349,6 @@ pub async fn search_over_groups(
full_text_search_over_groups(
data.clone(),
parsed_query,
page,
get_total_pages,
pool,
dataset_org_plan_sub.dataset,
server_dataset_config,
Expand All @@ -1373,8 +1360,6 @@ pub async fn search_over_groups(
hybrid_search_over_groups(
data.clone(),
parsed_query,
page,
get_total_pages,
pool,
dataset_org_plan_sub.dataset,
server_dataset_config,
Expand All @@ -1386,8 +1371,6 @@ pub async fn search_over_groups(
semantic_search_over_groups(
data.clone(),
parsed_query,
page,
get_total_pages,
pool,
dataset_org_plan_sub.dataset,
server_dataset_config,
Expand Down
4 changes: 0 additions & 4 deletions server/src/handlers/message_handler.rs
Original file line number Diff line number Diff line change
Expand Up @@ -665,8 +665,6 @@ pub async fn stream_response(
let result_chunks = search_hybrid_chunks(
search_chunk_data,
parsed_query,
1,
false,
pool.clone(),
dataset.clone(),
dataset_config,
Expand Down Expand Up @@ -919,8 +917,6 @@ pub async fn create_suggested_queries_handler(
quote_words: None,
negated_words: None,
},
1,
false,
pool,
dataset_org_plan_sub.dataset.clone(),
dataset_config,
Expand Down
1 change: 1 addition & 0 deletions server/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -243,6 +243,7 @@ impl Modify for SecurityAddon {
handlers::group_handler::SearchWithinGroupResults,
handlers::chunk_handler::SearchChunkQueryResponseBody,
handlers::chunk_handler::ChunkFilter,
data::models::DateRange,
data::models::FieldCondition,
data::models::Range,
handlers::chunk_handler::GetChunksData,
Expand Down
8 changes: 4 additions & 4 deletions server/src/operators/chunk_operator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ pub async fn get_chunk_metadatas_and_collided_chunks_from_point_ids_query(
metadata: chunk.0.metadata.clone(),
tracking_id: chunk.0.tracking_id.clone(),
time_stamp: chunk.0.time_stamp,
location: chunk.0.location.clone(),
location: chunk.0.location,
dataset_id: chunk.0.dataset_id,
weight: chunk.0.weight,
})
Expand Down Expand Up @@ -251,7 +251,7 @@ pub async fn get_chunk_metadatas_and_collided_chunks_from_point_ids_query(
metadata: chunk.0.metadata.clone(),
tracking_id: chunk.0.tracking_id.clone(),
time_stamp: chunk.0.time_stamp,
location: chunk.0.location.clone(),
location: chunk.0.location,
dataset_id: chunk.0.dataset_id,
weight: chunk.0.weight,
};
Expand Down Expand Up @@ -341,7 +341,7 @@ pub async fn get_slim_chunks_from_point_ids_query(
metadata: slim_chunk.metadata.clone(),
tracking_id: slim_chunk.tracking_id.clone(),
time_stamp: slim_chunk.time_stamp,
location: slim_chunk.location.clone(),
location: slim_chunk.location,
dataset_id: slim_chunk.dataset_id,
weight: slim_chunk.weight,
})
Expand Down Expand Up @@ -1214,7 +1214,7 @@ pub async fn create_chunk_metadata(
chunk.metadata.clone(),
chunk_tracking_id,
timestamp,
chunk.location.clone(),
chunk.location,
dataset_uuid,
chunk.weight.unwrap_or(0.0),
);
Expand Down
2 changes: 1 addition & 1 deletion server/src/operators/qdrant_operator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1174,7 +1174,7 @@ pub async fn point_id_exists_in_qdrant(
ServiceError::BadRequest("Failed to fetch points from qdrant".to_string())
})?;

Ok(data.result.len() > 0)
Ok(!data.result.is_empty())
}

#[tracing::instrument]
Expand Down
Loading

0 comments on commit 17c0f65

Please sign in to comment.