Skip to content

Commit

Permalink
Introduce PROFILE_CACHE_TTL
Browse files Browse the repository at this point in the history
  • Loading branch information
lucemans committed Apr 17, 2024
1 parent df210c3 commit 89f5a5e
Show file tree
Hide file tree
Showing 10 changed files with 26 additions and 15 deletions.
1 change: 1 addition & 0 deletions server/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -8,3 +8,4 @@ IPFS_GATEWAY=https://ipfs.io/ipfs/
# Optionally you can specify a comma-separated list PROFILE_RECORDS, however if not provided there are sensible defaults
# PROFILE_RECORDS=com.discord,com.twitter
# MAX_BULK_SIZE=10
# PROFILE_CACHE_TTL=600
6 changes: 4 additions & 2 deletions server/src/routes/address.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,8 @@ pub async fn get_bulk(
Qs(query): Qs<AddressGetBulkQuery>,
State(state): State<Arc<crate::AppState>>,
) -> Result<Json<ListResponse<BulkResponse<Profile>>>, RouteError> {
let addresses = validate_bulk_input(&query.addresses, state.service.max_bulk_size)?;
let addresses =
validate_bulk_input(&query.addresses, state.service.max_bulk_size.unwrap_or(10))?;

let addresses = addresses
.iter()
Expand All @@ -109,7 +110,8 @@ pub async fn get_bulk_sse(
Qs(query): Qs<AddressGetBulkQuery>,
State(state): State<Arc<crate::AppState>>,
) -> impl IntoResponse {
let addresses = validate_bulk_input(&query.addresses, state.service.max_bulk_size).unwrap();
let addresses =
validate_bulk_input(&query.addresses, state.service.max_bulk_size.unwrap_or(10)).unwrap();

let (event_tx, event_rx) = tokio::sync::mpsc::unbounded_channel::<Result<Event, Infallible>>();

Expand Down
5 changes: 3 additions & 2 deletions server/src/routes/name.rs
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ pub async fn get_bulk(
Qs(query): Qs<NameGetBulkQuery>,
State(state): State<Arc<crate::AppState>>,
) -> Result<Json<ListResponse<BulkResponse<Profile>>>, RouteError> {
let names = validate_bulk_input(&query.names, state.service.max_bulk_size)?;
let names = validate_bulk_input(&query.names, state.service.max_bulk_size.unwrap_or(10))?;

let profiles = names
.into_iter()
Expand All @@ -94,7 +94,8 @@ pub async fn get_bulk_sse(
Qs(query): Qs<NameGetBulkQuery>,
State(state): State<Arc<crate::AppState>>,
) -> impl IntoResponse {
let names = validate_bulk_input(&query.names, state.service.max_bulk_size).unwrap();
let names =
validate_bulk_input(&query.names, state.service.max_bulk_size.unwrap_or(10)).unwrap();

let (event_tx, event_rx) = tokio::sync::mpsc::unbounded_channel::<Result<Event, Infallible>>();

Expand Down
5 changes: 3 additions & 2 deletions server/src/routes/universal.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ pub async fn get_bulk(
Qs(query): Qs<UniversalGetBulkQuery>,
State(state): State<Arc<crate::AppState>>,
) -> Result<Json<ListResponse<BulkResponse<Profile>>>, RouteError> {
let queries = validate_bulk_input(&query.queries, state.service.max_bulk_size)?;
let queries = validate_bulk_input(&query.queries, state.service.max_bulk_size.unwrap_or(10))?;

let profiles = queries
.iter()
Expand All @@ -95,7 +95,8 @@ pub async fn get_bulk_sse(
Qs(query): Qs<UniversalGetBulkQuery>,
State(state): State<Arc<crate::AppState>>,
) -> impl IntoResponse {
let queries = validate_bulk_input(&query.queries, state.service.max_bulk_size).unwrap();
let queries =
validate_bulk_input(&query.queries, state.service.max_bulk_size.unwrap_or(10)).unwrap();

let (event_tx, event_rx) = tokio::sync::mpsc::unbounded_channel::<Result<Event, Infallible>>();

Expand Down
5 changes: 4 additions & 1 deletion server/src/state.rs
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,9 @@ impl AppState {
.parse::<H160>()
.expect("UNIVERSAL_RESOLVER should be a valid address");

let max_bulk_size = env::var("MAX_BULK_SIZE").unwrap_or_else(|_| "10".to_string()).parse().unwrap();
let max_bulk_size = env::var("MAX_BULK_SIZE").unwrap_or_else(|_| "10".to_string()).parse().ok();

let cache_ttl = env::var("PROFILE_CACHE_TTL").unwrap_or_else(|_| "600".to_string()).parse().ok();

Self {
service: ENSService {
Expand All @@ -84,6 +86,7 @@ impl AppState {
ipfs_gateway,
arweave_gateway,
max_bulk_size,
cache_ttl,
profile_records: Arc::from(profile_records),
profile_chains: Arc::from(multicoin_chains),
universal_resolver,
Expand Down
4 changes: 2 additions & 2 deletions shared/src/core/address.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,8 @@ use ethers_core::types::Address;
use thiserror::Error;
use tracing::instrument;

use crate::core::ENSService;
use crate::core::resolvers::reverse::{resolve_reverse, ReverseResolveError};
use crate::core::ENSService;

#[derive(Error, Debug)]
pub enum AddressResolveError {
Expand Down Expand Up @@ -59,7 +59,7 @@ impl ENSService {

// Cache the value, expiring after the configured PROFILE_CACHE_TTL (defaults to 600 seconds)
self.cache
.set(&cache_key, &result, 600)
.set(&cache_key, &result, self.cache_ttl.unwrap_or(600))
.await
.map_err(|_| AddressResolveError::CacheFail("set"))?;

Expand Down
3 changes: 2 additions & 1 deletion shared/src/core/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -58,5 +58,6 @@ pub struct ENSService {
pub profile_records: Arc<[String]>,
pub profile_chains: Arc<[CoinType]>,
pub universal_resolver: H160,
pub max_bulk_size: usize,
pub max_bulk_size: Option<usize>,
pub cache_ttl: Option<u32>,
}
5 changes: 3 additions & 2 deletions shared/src/core/profile.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@ use ethers_ccip_read::CCIPReadMiddleware;
use tracing::{info, instrument};

use crate::cache::CacheError;
use crate::core::{ENSService, Profile};
use crate::core::error::ProfileError;
use crate::core::lookup_data::LookupInfo;
use crate::core::{ENSService, Profile};
use crate::models::lookup::ENSLookup;
use crate::utils::eip55::EIP55Address;

Expand Down Expand Up @@ -52,6 +52,7 @@ impl ENSService {
if let Ok(entry) = entry_result {
return Ok(entry);
}
// TODO: Else, warn about unparsable data in cache
}
}

Expand Down Expand Up @@ -150,7 +151,7 @@ impl ENSService {
serde_json::to_string(&value).map_err(|err| ProfileError::Other(err.to_string()))?;

self.cache
.set(&cache_key, &response, 600)
.set(&cache_key, &response, self.cache_ttl.unwrap_or(600))
.await
.map_err(|CacheError::Other(err)| {
ProfileError::Other(format!("cache set failed: {}", err))
Expand Down
4 changes: 2 additions & 2 deletions shared/src/core/records.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@ use ethers::prelude::Address;
use ethers_ccip_read::CCIPReadMiddleware;
use tracing::instrument;

use crate::core::ENSService;
use crate::core::error::ProfileError;
use crate::core::lookup_data::LookupInfo;
use crate::core::ENSService;
use crate::models::lookup::{ENSLookup, ENSLookupError, LookupState};

use super::resolvers::universal::resolve_universal;
Expand Down Expand Up @@ -115,7 +115,7 @@ impl ENSService {
// .map_err(|err| ProfileResolveError::Other(err.to_string()))?;
//
// self.cache
// .set(&cache_key, &response, 600)
// .set(&cache_key, &response, self.cache_ttl.unwrap_or(600))
// .await
// .map_err(|CacheError::Other(err)| {
// ProfileResolveError::Other(format!("cache set failed: {}", err))
Expand Down
3 changes: 2 additions & 1 deletion worker/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,8 @@ async fn main(req: Request, env: Env, _ctx: Context) -> worker::Result<Response>
opensea_api_key,
ipfs_gateway,
arweave_gateway,
max_bulk_size: 10,
max_bulk_size: None,
cache_ttl: Some(600),
profile_records: Arc::from(profile_records),
profile_chains: Arc::from(profile_chains),
universal_resolver,
Expand Down

0 comments on commit 89f5a5e

Please sign in to comment.