Run cargo fmt
kylerchin committed Mar 20, 2024
1 parent 00f8ede commit cea0228
Showing 5 changed files with 65 additions and 57 deletions.
68 changes: 34 additions & 34 deletions src/maple/gtfs_handlers/colour_correction.rs
@@ -17,40 +17,40 @@ pub fn fix_background_colour_rgb_feed_route(
) -> RGB<u8> {
match feed_id.as_str() {
"f-9q5b-longbeachtransit" => {
match route.id.as_str() {
"1" => RGB::new(247,161,129),
"2" => RGB::new(228,228,23),
"8" => RGB::new(0,167,78),
"22" => RGB::new(167,66,62),
//passport
"37" => RGB::new(154,75,157),
"41" => RGB::new(238,41,46),
"45" => RGB::new(0,134,172),
"46" => RGB::new(116,129,55),
"51" => RGB::new(170,96,161),
"61" => RGB::new(238,41,46),
"71" => RGB::new(4,82,161),
"91" => RGB::new(240,94,140),
"92" => RGB::new(240,126,72),
"93" => RGB::new(173,113,175),
"94" => RGB::new(240,94,140),
"101" => RGB::new(44,186,151),
"102" => RGB::new(118,90,165),
"103" => RGB::new(242,131,179),
"111" => RGB::new(39,127,195),
"112" => RGB::new(23,176,80),
"131" => RGB::new(158,120,89),
"151" => RGB::new(249,165,27),
"171" => RGB::new(0,151,104),
"172" => RGB::new(241,86,41),
"173" => RGB::new(194,56,38),
"174" => RGB::new(5,98,132),
"182" => RGB::new(155,43,103),
"191" => RGB::new(139,157,208),
"192" => RGB::new(237,72,154),
"405" => RGB::new(0,181,236),
_ => fix_background_colour_rgb(background)
}
match route.id.as_str() {
"1" => RGB::new(247, 161, 129),
"2" => RGB::new(228, 228, 23),
"8" => RGB::new(0, 167, 78),
"22" => RGB::new(167, 66, 62),
//passport
"37" => RGB::new(154, 75, 157),
"41" => RGB::new(238, 41, 46),
"45" => RGB::new(0, 134, 172),
"46" => RGB::new(116, 129, 55),
"51" => RGB::new(170, 96, 161),
"61" => RGB::new(238, 41, 46),
"71" => RGB::new(4, 82, 161),
"91" => RGB::new(240, 94, 140),
"92" => RGB::new(240, 126, 72),
"93" => RGB::new(173, 113, 175),
"94" => RGB::new(240, 94, 140),
"101" => RGB::new(44, 186, 151),
"102" => RGB::new(118, 90, 165),
"103" => RGB::new(242, 131, 179),
"111" => RGB::new(39, 127, 195),
"112" => RGB::new(23, 176, 80),
"131" => RGB::new(158, 120, 89),
"151" => RGB::new(249, 165, 27),
"171" => RGB::new(0, 151, 104),
"172" => RGB::new(241, 86, 41),
"173" => RGB::new(194, 56, 38),
"174" => RGB::new(5, 98, 132),
"182" => RGB::new(155, 43, 103),
"191" => RGB::new(139, 157, 208),
"192" => RGB::new(237, 72, 154),
"405" => RGB::new(0, 181, 236),
_ => fix_background_colour_rgb(background),
}
}
"f-9q5-metro~losangeles" => {
if background == WHITE_RGB {
4 changes: 2 additions & 2 deletions src/maple/gtfs_handlers/flatten.rs
@@ -8,6 +8,6 @@ fn flatten_feed(feed_id: &str) -> Result<(), Box<dyn Error>> {
// unzip

// go into folder and unnest folders

Ok(())
}
}
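flatten_feed above is still a stub whose behaviour is only sketched in comments. As one hedged illustration of the "go into folder and unnest folders" step (the helper name and the assumption of a single nested directory are hypothetical, not part of this commit), the contents of a lone subdirectory could be moved up into the feed folder:

use std::error::Error;
use std::fs;
use std::path::{Path, PathBuf};

// If the extracted feed directory contains exactly one entry and it is a
// directory, move that directory's contents up one level so the GTFS .txt
// files sit directly under the feed folder.
fn unnest_single_folder(feed_dir: &Path) -> Result<(), Box<dyn Error>> {
    let entries: Vec<PathBuf> = fs::read_dir(feed_dir)?
        .filter_map(|entry| entry.ok().map(|entry| entry.path()))
        .collect();

    if entries.len() == 1 && entries[0].is_dir() {
        let nested = &entries[0];
        for inner in fs::read_dir(nested)? {
            let inner = inner?.path();
            if let Some(name) = inner.file_name() {
                fs::rename(&inner, feed_dir.join(name))?;
            }
        }
        fs::remove_dir(nested)?;
    }

    Ok(())
}
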
2 changes: 1 addition & 1 deletion src/maple/gtfs_handlers/mod.rs
@@ -11,4 +11,4 @@ pub struct DownloadAttempt {
pub failed: bool,
pub mark_for_redo: bool,
pub ingestion_version: i32,
}
}
39 changes: 23 additions & 16 deletions src/maple/main.rs
@@ -27,7 +27,6 @@ use git2::Repository;
use crate::transitland_download::DownloadedFeedsInformation;

async fn run_ingest() -> Result<(), Box<dyn Error>> {

const maple_ingestion_version: i32 = 1;

//Ensure git submodule transitland-atlas downloads and updates correctly
@@ -36,26 +35,26 @@ async fn run_ingest() -> Result<(), Box<dyn Error>> {
match repo.find_submodule("transitland-atlas") {
Ok(transitland_submodule) => {
println!("Submodule found.");

let mut transitland_submodule = transitland_submodule;

match transitland_submodule.update(true, None) {
Ok(update) => {
println!("Submodule updated.");
},
}
Err(update_err) => {
eprintln!("Unable to update submodule");

// don't need to fail if can't reach github servers for now
}
}
},
}
Err(find_submodule) => {
eprintln!("Can't find submodule!");
return Err(Box::new(find_submodule));
}
}
},
}
Err(repo_err) => {
eprintln!("Can't find own repo!");
return Err(Box::new(repo_err));
@@ -106,28 +105,38 @@ async fn run_ingest() -> Result<(), Box<dyn Error>> {
// count eligible feeds that are marked ingest == true using a filter and .len()

let mut counter_of_eligible_feeds: Option<usize> = match &eligible_feeds {
Ok(eligible_feeds) => {
Some(eligible_feeds.iter().filter(|download_feed_info| download_feed_info.ingest == true).collect::<Vec<&DownloadedFeedsInformation>>().len())
}
Err(_) => None
Ok(eligible_feeds) => Some(
eligible_feeds
.iter()
.filter(|download_feed_info| download_feed_info.ingest == true)
.collect::<Vec<&DownloadedFeedsInformation>>()
.len(),
),
Err(_) => None,
};

// debug print to output
match counter_of_eligible_feeds {
Some(counter_of_eligible_feeds) => {
println!("{} feeds marked ready for schedule ingestion.", counter_of_eligible_feeds);
},
println!(
"{} feeds marked ready for schedule ingestion.",
counter_of_eligible_feeds
);
}
None => {
println!("Unable to get eligible feed list.");
}
}

//refresh the metadata for anything that's changed
//refresh the metadata for anything that's changed

//insert the feeds that are new

if let Ok(eligible_feeds) = eligible_feeds {
let to_ingest_feeds = eligible_feeds.iter().filter(|download_feed_info| download_feed_info.ingest == true).collect::<Vec<&DownloadedFeedsInformation>>();
let to_ingest_feeds = eligible_feeds
.iter()
.filter(|download_feed_info| download_feed_info.ingest == true)
.collect::<Vec<&DownloadedFeedsInformation>>();

// for now, use a thread pool
// in the future, map reduce this job out to worker servers
@@ -139,11 +148,9 @@ async fn run_ingest() -> Result<(), Box<dyn Error>> {
// use k/d tree presentation to calculate line optimisation and transfer patterns (not clear how this works, needs further research)
// hand off to routing algorithm preprocessing engine Prarie (needs further research and development)


// Folder unzip time!

// perform additional checks to ensure feed is not a zip bomb

}

//determine if the old one should be deleted, if so, delete it
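The comments in this hunk note that checks against zip bombs are still to be added before unzipping. A minimal sketch of one possible guard, assuming extraction goes through the zip crate; the size caps and the function name are hypothetical, not something this commit implements:

use std::fs::File;
use std::io::BufReader;
use zip::ZipArchive;

// Reject archives whose declared uncompressed size is implausibly large,
// either in absolute terms or relative to the compressed size.
fn looks_like_zip_bomb(path: &str) -> Result<bool, Box<dyn std::error::Error>> {
    const MAX_TOTAL_UNCOMPRESSED: u64 = 10 * 1024 * 1024 * 1024; // 10 GiB, hypothetical cap
    const MAX_RATIO: u64 = 200; // uncompressed : compressed, hypothetical cap

    let mut archive = ZipArchive::new(BufReader::new(File::open(path)?))?;
    let mut total_uncompressed: u64 = 0;
    let mut total_compressed: u64 = 0;

    for i in 0..archive.len() {
        let entry = archive.by_index(i)?;
        total_uncompressed += entry.size();
        total_compressed += entry.compressed_size();
    }

    let ratio = total_uncompressed / total_compressed.max(1);
    Ok(total_uncompressed > MAX_TOTAL_UNCOMPRESSED || ratio > MAX_RATIO)
}
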
9 changes: 5 additions & 4 deletions src/maple/transitland_download.rs
@@ -1,3 +1,4 @@
use crate::gtfs_handlers::DownloadAttempt;
use dmfr_folder_reader::ReturnDmfrAnalysis;
use futures;
use futures::StreamExt;
@@ -9,10 +10,9 @@ use std::fs::File;
use std::io::copy;
use std::io::Write;
use std::sync::Arc;
use std::time::SystemTime;
use std::time::Duration;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
use crate::gtfs_handlers::DownloadAttempt;

#[derive(Clone)]
struct StaticFeedToDownload {
@@ -45,7 +45,7 @@ pub struct StaticPassword {
pub header_auth_key: Option<String>,
// this would be "Bearer" so the header would insert Authorization: Bearer {key}
pub header_auth_value_prefix: Option<String>,
pub url_auth_key: Option<String>
pub url_auth_key: Option<String>,
}

// This is an efficient method to scan all static ingests and only insert what is new.
@@ -67,7 +67,8 @@ pub async fn download_return_eligible_feeds(
if let Ok(entries) = fs::read_dir("transitland-atlas/feeds") {
println!("Downloading zip files now");

let static_passwords = sqlx::query_as!(StaticPassword,"SELECT * FROM gtfs.static_passwords;");
let static_passwords =
sqlx::query_as!(StaticPassword, "SELECT * FROM gtfs.static_passwords;");

let feeds_to_download = transitland_meta.feed_hashmap.iter().filter(|(_, feed)| match feed.spec {
dmfr::FeedSpec::Gtfs => true,
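The StaticPassword comment earlier in this file says the prefix "would be 'Bearer' so the header would insert Authorization: Bearer {key}". A hedged illustration of that composition using reqwest; the parameter names and the mapping from the row's columns are assumptions, not defined by this commit:

use reqwest::Client;

// Hypothetical sketch: attach the static auth header described by the
// StaticPassword comment. `header_name` and `secret` stand in for whatever
// the database row actually stores; the exact field mapping is an assumption.
async fn request_with_static_auth(
    client: &Client,
    url: &str,
    header_name: &str,          // e.g. "Authorization"
    value_prefix: Option<&str>, // e.g. Some("Bearer")
    secret: &str,               // the key itself
) -> Result<reqwest::Response, reqwest::Error> {
    let value = match value_prefix {
        Some(prefix) => format!("{} {}", prefix, secret), // "Bearer {key}"
        None => secret.to_string(),
    };
    client.get(url).header(header_name, value).send().await
}
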
