Skip to content

Commit

Permalink
Example App: Small Axum cleanup and module rework
Browse files Browse the repository at this point in the history
  • Loading branch information
tyleragreen committed Jul 4, 2024
1 parent 276e98c commit 59d3e35
Show file tree
Hide file tree
Showing 5 changed files with 118 additions and 76 deletions.
121 changes: 69 additions & 52 deletions example-app/src/api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,10 @@ use axum::{
routing::{get, post},
Json, Router,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};

use crate::fetcher::Feed;
use crate::models::{CreateFeed, Feed, Status};
use crate::scheduler_interface::ToScheduler;

#[derive(Clone)]
Expand All @@ -35,31 +34,10 @@ pub fn app(scheduler_interface: Arc<dyn ToScheduler + Send + Sync>) -> Router {
.with_state(state)
}

#[derive(Serialize)]
struct Status {
status: String,
}

impl Status {
fn new(status: &str) -> Self {
Self {
status: status.to_string(),
}
}
}

async fn status_handler() -> impl IntoResponse {
Json(Status::new("OK"))
}

#[derive(Clone, Deserialize, Serialize)]
struct CreateFeed {
name: String,
url: String,
frequency: u64,
headers: HashMap<String, String>,
}

async fn post_handler(
state: State<AppState>,
Json(CreateFeed {
Expand All @@ -68,8 +46,11 @@ async fn post_handler(
frequency,
headers,
}): Json<CreateFeed>,
) -> impl IntoResponse {
let id = *(state.next_feed_id.read().unwrap());
) -> Result<impl IntoResponse, StatusCode> {
let id = *(state
.next_feed_id
.read()
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?);
let feed = Feed {
id,
name,
Expand All @@ -78,24 +59,37 @@ async fn post_handler(
headers,
};

*(state.next_feed_id.write().unwrap()) += 1;
state.db.write().unwrap().insert(id, feed.clone());
*(state
.next_feed_id
.write()
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?) += 1;
state
.db
.write()
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
.insert(id, feed.clone());
state.scheduler_interface.create(feed.clone());

(StatusCode::CREATED, Json(feed))
Ok((StatusCode::CREATED, Json(feed)))
}

async fn get_handler(path: Path<String>, state: State<AppState>) -> impl IntoResponse {
let id = path.parse::<usize>().map_err(|_| StatusCode::BAD_REQUEST)?;
async fn get_handler(
Path(id): Path<usize>,
state: State<AppState>,
) -> Result<impl IntoResponse, StatusCode> {
let feed = state
.db
.read()
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
.get(&id)
.cloned()
.ok_or(StatusCode::NOT_FOUND)?;

match state.db.read().unwrap().get(&id).cloned() {
Some(feed) => Ok(Json(feed)),
None => Err(StatusCode::NOT_FOUND),
}
Ok(Json(feed))
}

async fn put_handler(
path: Path<String>,
Path(id): Path<usize>,
state: State<AppState>,
Json(CreateFeed {
name,
Expand All @@ -104,8 +98,13 @@ async fn put_handler(
headers,
}): Json<CreateFeed>,
) -> impl IntoResponse {
let id = path.parse::<usize>().map_err(|_| StatusCode::BAD_REQUEST)?;
if state.db.read().unwrap().get(&id).is_none() {
if state
.db
.read()
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
.get(&id)
.is_none()
{
return Err(StatusCode::NOT_FOUND);
}

Expand All @@ -117,36 +116,54 @@ async fn put_handler(
headers,
};

state.db.write().unwrap().insert(id, feed.clone());
state
.db
.write()
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
.insert(id, feed.clone());
state.scheduler_interface.update(feed.clone());

Ok(Json(feed))
}

async fn delete_handler(path: Path<String>, state: State<AppState>) -> impl IntoResponse {
let id = path.parse::<usize>().map_err(|_| StatusCode::BAD_REQUEST)?;
let feed = match state.db.read().unwrap().get(&id).cloned() {
Some(f) => f,
None => return Err(StatusCode::NOT_FOUND),
};

state.db.write().unwrap().remove(&feed.id);
/// Deletes the feed identified by `id`.
///
/// Responses:
/// * `204 No Content` on successful deletion,
/// * `404 Not Found` when no feed with that id exists,
/// * `500 Internal Server Error` when the database lock is poisoned.
async fn delete_handler(
    Path(id): Path<usize>,
    state: State<AppState>,
) -> Result<impl IntoResponse, StatusCode> {
    // Take the write lock once and remove directly. `HashMap::remove`
    // returns the removed value, so the previous read-lock existence
    // check is redundant — and eliminating it also closes the race
    // window between checking for the feed and removing it.
    // The guard is a temporary, so the lock is released before the
    // scheduler call below.
    let feed = state
        .db
        .write()
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
        .remove(&id)
        .ok_or(StatusCode::NOT_FOUND)?;

    // Notify the scheduler of the deletion.
    state.scheduler_interface.delete(feed);

    Ok(StatusCode::NO_CONTENT)
}

async fn list_handler(state: State<AppState>) -> impl IntoResponse {
let feeds: Vec<Feed> = state.db.read().unwrap().values().cloned().collect();
Json(feeds)
/// Returns every stored feed as a JSON array.
///
/// Responds `500 Internal Server Error` when the database lock is
/// poisoned; otherwise always `200 OK` (an empty store yields `[]`).
async fn list_handler(state: State<AppState>) -> Result<impl IntoResponse, StatusCode> {
    // Bind the read guard once, then snapshot the values into an
    // owned Vec so the lock is released before the response is built.
    let db = state
        .db
        .read()
        .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
    let feeds = db.values().cloned().collect::<Vec<Feed>>();
    Ok(Json(feeds))
}

#[cfg(test)]
mod api_tests {
#[cfg(not(feature = "use_dependencies"))]
use crate::deps::mime;

use crate::fetcher::Feed;
use crate::scheduler_interface::{SchedulerInterface, TaskSender};
use tokio::net::TcpListener;
use tulsa::AsyncTask;
Expand Down Expand Up @@ -227,7 +244,7 @@ mod api_tests {
let body = axum::body::to_bytes(response.into_body(), usize::MAX)
.await
.unwrap();
assert_eq!(body.len(), 0);
assert_eq!(&body[..], b"Invalid URL: Cannot parse `\"abc\"` to a `u64`");

let sender = Arc::new(Mutex::new(MockSender::new()));
let interface = Arc::new(SchedulerInterface::new(sender));
Expand All @@ -246,7 +263,7 @@ mod api_tests {
let body = axum::body::to_bytes(response.into_body(), usize::MAX)
.await
.unwrap();
assert_eq!(body.len(), 0);
assert_eq!(&body[..], b"Invalid URL: Cannot parse `\"-1\"` to a `u64`");
}

#[tokio::test]
Expand Down
24 changes: 1 addition & 23 deletions example-app/src/fetcher.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,30 +8,8 @@ use ureq;
mod transit {
include!(concat!(env!("OUT_DIR"), "/transit_realtime.rs"));
}
use std::collections::HashMap;

use reqwest::header::{HeaderMap, HeaderName};
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Feed {
pub id: usize,
pub name: String,
pub url: String,
pub frequency: u64,
pub headers: HashMap<String, String>,
}

impl Feed {
pub fn to_header_map(&self) -> HeaderMap {
let mut headers = HeaderMap::new();
for (key, value) in self.headers.iter() {
let new_key: HeaderName = key.parse().unwrap();
headers.insert(new_key, value.parse().unwrap());
}
headers
}
}
use crate::models::Feed;

async fn fetch(feed: &Feed) -> usize {
println!("Fetching {}", feed.name);
Expand Down
1 change: 1 addition & 0 deletions example-app/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
pub mod api;
pub mod fetcher;
pub mod models;
pub mod scheduler_interface;

// The deps module is an effort to re-implement my third-party dependencies as
Expand Down
45 changes: 45 additions & 0 deletions example-app/src/models/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
use reqwest::header::{HeaderMap, HeaderName};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// A registered feed: a remote resource polled on a recurring schedule.
///
/// Serialized in API responses and stored as the value type of the
/// in-memory database keyed by `id`.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Feed {
    /// Unique identifier; also the key in the in-memory store.
    pub id: usize,
    /// Human-readable feed name.
    pub name: String,
    /// URL the feed is fetched from.
    pub url: String,
    // NOTE(review): units are not visible here — presumably seconds;
    // confirm against the scheduler that consumes this value.
    pub frequency: u64,
    /// HTTP headers sent with each fetch, as raw name/value strings.
    pub headers: HashMap<String, String>,
}

impl Feed {
    /// Converts the feed's raw string headers into a [`HeaderMap`]
    /// suitable for attaching to an HTTP request.
    ///
    /// # Panics
    ///
    /// Panics if any stored header name or value is not a valid HTTP
    /// header token (e.g. contains forbidden characters). The `expect`
    /// messages state the violated invariant so a panic is diagnosable,
    /// unlike the previous bare `unwrap()`s.
    pub fn to_header_map(&self) -> HeaderMap {
        let mut headers = HeaderMap::new();
        for (key, value) in self.headers.iter() {
            let name: HeaderName = key
                .parse()
                .expect("feed header name must be a valid HTTP header name");
            let val = value
                .parse()
                .expect("feed header value must be a valid HTTP header value");
            headers.insert(name, val);
        }
        headers
    }
}

/// This represents a [`Feed`] but without an ID, which are used in POST bodies.
#[derive(Clone, Deserialize, Serialize)]
pub struct CreateFeed {
    /// Human-readable feed name.
    pub name: String,
    /// URL the feed will be fetched from.
    pub url: String,
    // NOTE(review): units not visible here — presumably seconds; confirm.
    pub frequency: u64,
    /// HTTP headers to send with each fetch, as raw name/value strings.
    pub headers: HashMap<String, String>,
}

/// Simple serializable payload for the health/status endpoint,
/// e.g. `{"status": "OK"}`.
#[derive(Serialize)]
pub struct Status {
    // The status text; only exposed through serialization.
    status: String,
}

impl Status {
    /// Creates a new `Status` carrying the given status text.
    ///
    /// Accepts anything convertible into a `String` (`&str`, `String`, …).
    /// This generalizes the previous `&str`-only signature and remains
    /// backward compatible: existing `Status::new("OK")` call sites
    /// compile unchanged, while owned strings no longer need a borrow
    /// plus re-allocation.
    pub fn new(status: impl Into<String>) -> Self {
        Self {
            status: status.into(),
        }
    }
}
3 changes: 2 additions & 1 deletion example-app/src/scheduler_interface.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@ use std::sync::{Arc, Mutex};
use std::time::Duration;
use tulsa::{AsyncTask, Scheduler, SyncTask};

use crate::fetcher::{fetch_sync, recurring_fetch, Feed};
use crate::fetcher::{fetch_sync, recurring_fetch};
use crate::models::Feed;

pub enum Mode {
Sync,
Expand Down

0 comments on commit 59d3e35

Please sign in to comment.