Skip to content

Commit

Permalink
bugfix: optionally query the previous scrape request and don't error if it doesn't exist
Browse files Browse the repository at this point in the history
  • Loading branch information
cdxker committed Oct 10, 2024
1 parent 9a027e3 commit 4e1ac82
Showing 1 changed file with 10 additions and 10 deletions.
20 changes: 10 additions & 10 deletions server/src/operators/crawl_operator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -339,7 +339,7 @@ pub async fn update_crawl_settings_for_dataset(
.await
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?;

let crawl_req = crawl_requests_table::crawl_requests
let prev_crawl_req = crawl_requests_table::crawl_requests
.select((
crawl_requests_table::id,
crawl_requests_table::url,
Expand All @@ -353,7 +353,8 @@ pub async fn update_crawl_settings_for_dataset(
))
.filter(crawl_requests_table::dataset_id.eq(dataset_id))
.first::<CrawlRequestPG>(&mut conn)
.await;
.await
.optional()?;

if let Some(ref url) = crawl_options.site_url {
diesel::update(
Expand Down Expand Up @@ -382,14 +383,13 @@ pub async fn update_crawl_settings_for_dataset(
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?;
}

let previous_crawl_options: CrawlOptions = serde_json::from_value(
crawl_req
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?
.crawl_options,
)
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?;
let merged_options = if let Some(prev_crawl_req) = prev_crawl_req {
let previous_crawl_options: CrawlOptions = serde_json::from_value(prev_crawl_req.crawl_options).map_err(|e| ServiceError::InternalServerError(e.to_string()))?;
crawl_options.merge(previous_crawl_options)
} else {
crawl_options
};

let merged_options = crawl_options.merge(previous_crawl_options);

diesel::update(
crawl_requests_table::crawl_requests
Expand All @@ -405,7 +405,7 @@ pub async fn update_crawl_settings_for_dataset(
.map_err(|e| ServiceError::InternalServerError(e.to_string()))?;

crawl(
crawl_options.clone(),
merged_options.clone(),
pool.clone(),
redis_pool.clone(),
dataset_id,
Expand Down

0 comments on commit 4e1ac82

Please sign in to comment.