Skip to content

Commit

Permalink
Merge pull request #185 from omid/update_to_syn_2
Browse files Browse the repository at this point in the history
Update to syn 2
  • Loading branch information
jaemk authored Apr 7, 2024
2 parents fdddef0 + 68ba7d1 commit fca60ec
Show file tree
Hide file tree
Showing 16 changed files with 106 additions and 102 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
## [Unreleased]
## Added
## Changed
- [Breaking] `type` attribute is now `ty`
- Upgrade to syn2
## Removed

## [0.49.3]
Expand Down
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ use cached::SizedCache;

/// Use an explicit cache-type with a custom creation block and custom cache-key generating block
#[cached(
type = "SizedCache<String, usize>",
ty = "SizedCache<String, usize>",
create = "{ SizedCache::with_size(100) }",
convert = r#"{ format!("{}{}", a, b) }"#
)]
Expand Down Expand Up @@ -123,7 +123,7 @@ enum ExampleError {
/// by your function. All `io_cached` functions must return `Result`s.
#[io_cached(
map_error = r##"|e| ExampleError::RedisError(format!("{:?}", e))"##,
type = "AsyncRedisCache<u64, String>",
ty = "AsyncRedisCache<u64, String>",
create = r##" {
AsyncRedisCache::new("cached_redis_prefix", 1)
.set_refresh(true)
Expand Down
7 changes: 2 additions & 5 deletions cached_proc_macro/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,6 @@ proc-macro = true

[dependencies]
quote = "1.0.6"
darling = "0.14.2"
darling = "0.20.8"
proc-macro2 = "1.0.49"

[dependencies.syn]
version = "1.0.27"
features = ["full"]
syn = "2.0.52"
53 changes: 27 additions & 26 deletions cached_proc_macro/src/cached.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
use crate::helpers::*;
use darling::ast::NestedMeta;
use darling::FromMeta;
use proc_macro::TokenStream;
use quote::quote;
use syn::spanned::Spanned;
use syn::{parse_macro_input, parse_str, AttributeArgs, Block, Ident, ItemFn, ReturnType, Type};
use syn::{parse_macro_input, parse_str, Block, Ident, ItemFn, ReturnType, Type};

#[derive(FromMeta)]
struct MacroArgs {
Expand All @@ -29,16 +30,21 @@ struct MacroArgs {
sync_writes: bool,
#[darling(default)]
with_cached_flag: bool,
#[darling(default, rename = "type")]
cache_type: Option<String>,
#[darling(default, rename = "create")]
cache_create: Option<String>,
#[darling(default)]
ty: Option<String>,
#[darling(default)]
create: Option<String>,
#[darling(default)]
result_fallback: bool,
}

pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let attr_args = parse_macro_input!(args as AttributeArgs);
let attr_args = match NestedMeta::parse_meta_list(args.into()) {
Ok(v) => v,
Err(e) => {
return TokenStream::from(darling::Error::from(e).write_errors());
}
};
let args = match MacroArgs::from_list(&attr_args) {
Ok(v) => v,
Err(e) => {
Expand Down Expand Up @@ -86,21 +92,16 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
None => Ident::new(&fn_ident.to_string().to_uppercase(), fn_ident.span()),
};

let (cache_key_ty, key_convert_block) = make_cache_key_type(
&args.key,
&args.convert,
&args.cache_type,
input_tys,
&input_names,
);
let (cache_key_ty, key_convert_block) =
make_cache_key_type(&args.key, &args.convert, &args.ty, input_tys, &input_names);

// make the cache type and create statement
let (cache_ty, cache_create) = match (
&args.unbound,
&args.size,
&args.time,
&args.cache_type,
&args.cache_create,
&args.ty,
&args.create,
&args.time_refresh,
) {
(true, None, None, None, None, _) => {
Expand Down Expand Up @@ -130,12 +131,12 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
(cache_ty, cache_create)
}
(false, None, None, Some(type_str), Some(create_str), _) => {
let cache_type = parse_str::<Type>(type_str).expect("unable to parse cache type");
let ty = parse_str::<Type>(type_str).expect("unable to parse cache type");

let cache_create =
parse_str::<Block>(create_str).expect("unable to parse cache create block");

(quote! { #cache_type }, quote! { #cache_create })
(quote! { #ty }, quote! { #cache_create })
}
(false, None, None, Some(_), None, _) => {
panic!("type requires create to also be set")
Expand All @@ -153,9 +154,9 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
(false, false) => {
let set_cache_block = quote! { cache.cache_set(key, result.clone()); };
let return_cache_block = if args.with_cached_flag {
quote! { let mut r = result.clone(); r.was_cached = true; return r }
quote! { let mut r = result.to_owned(); r.was_cached = true; return r }
} else {
quote! { return result.clone() }
quote! { return result.to_owned() }
};
(set_cache_block, return_cache_block)
}
Expand All @@ -166,9 +167,9 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
}
};
let return_cache_block = if args.with_cached_flag {
quote! { let mut r = result.clone(); r.was_cached = true; return Ok(r) }
quote! { let mut r = result.to_owned(); r.was_cached = true; return Ok(r) }
} else {
quote! { return Ok(result.clone()) }
quote! { return Ok(result.to_owned()) }
};
(set_cache_block, return_cache_block)
}
Expand All @@ -179,7 +180,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
}
};
let return_cache_block = if args.with_cached_flag {
quote! { let mut r = result.clone(); r.was_cached = true; return Some(r) }
quote! { let mut r = result.to_owned(); r.was_cached = true; return Some(r) }
} else {
quote! { return Some(result.clone()) }
};
Expand All @@ -198,7 +199,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let lock;
let function_no_cache;
let function_call;
let cache_type;
let ty;
if asyncness.is_some() {
lock = quote! {
let mut cache = #cache_ident.lock().await;
Expand All @@ -212,7 +213,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let result = #no_cache_fn_ident(#(#input_names),*).await;
};

cache_type = quote! {
ty = quote! {
#visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex<#cache_ty>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(#cache_create));
};
} else {
Expand All @@ -228,7 +229,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let result = #no_cache_fn_ident(#(#input_names),*);
};

cache_type = quote! {
ty = quote! {
#visibility static #cache_ident: ::cached::once_cell::sync::Lazy<std::sync::Mutex<#cache_ty>> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(#cache_create));
};
}
Expand Down Expand Up @@ -305,7 +306,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let expanded = quote! {
// Cached static
#[doc = #cache_ident_doc]
#cache_type
#ty
// No cache function (origin of the cached function)
#[doc = #no_cache_fn_indent_doc]
#visibility #function_no_cache
Expand Down
6 changes: 3 additions & 3 deletions cached_proc_macro/src/helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,11 +96,11 @@ pub(super) fn find_value_type(
pub(super) fn make_cache_key_type(
key: &Option<String>,
convert: &Option<String>,
cache_type: &Option<String>,
ty: &Option<String>,
input_tys: Vec<Type>,
input_names: &Vec<Pat>,
) -> (TokenStream2, TokenStream2) {
match (key, convert, cache_type) {
match (key, convert, ty) {
(Some(key_str), Some(convert_str), _) => {
let cache_key_ty = parse_str::<Type>(key_str).expect("unable to parse cache key type");

Expand Down Expand Up @@ -145,7 +145,7 @@ pub(super) fn get_input_names(inputs: &Punctuated<FnArg, Comma>) -> Vec<Pat> {
}

pub(super) fn fill_in_attributes(attributes: &mut Vec<Attribute>, cache_fn_doc_extra: String) {
if attributes.iter().any(|attr| attr.path.is_ident("doc")) {
if attributes.iter().any(|attr| attr.path().is_ident("doc")) {
attributes.push(parse_quote! { #[doc = ""] });
attributes.push(parse_quote! { #[doc = "# Caching"] });
attributes.push(parse_quote! { #[doc = #cache_fn_doc_extra] });
Expand Down
68 changes: 33 additions & 35 deletions cached_proc_macro/src/io_cached.rs
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
use crate::helpers::*;
use darling::ast::NestedMeta;
use darling::FromMeta;
use proc_macro::TokenStream;
use quote::quote;
use syn::spanned::Spanned;
use syn::{
parse_macro_input, parse_str, AttributeArgs, Block, ExprClosure, GenericArgument, Ident,
ItemFn, PathArguments, ReturnType, Type,
parse_macro_input, parse_str, Block, ExprClosure, GenericArgument, Ident, ItemFn,
PathArguments, ReturnType, Type,
};

#[derive(FromMeta)]
Expand All @@ -31,14 +32,19 @@ struct IOMacroArgs {
convert: Option<String>,
#[darling(default)]
with_cached_flag: bool,
#[darling(default, rename = "type")]
cache_type: Option<String>,
#[darling(default, rename = "create")]
cache_create: Option<String>,
#[darling(default)]
ty: Option<String>,
#[darling(default)]
create: Option<String>,
}

pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
let attr_args = parse_macro_input!(args as AttributeArgs);
let attr_args = match NestedMeta::parse_meta_list(args.into()) {
Ok(v) => v,
Err(e) => {
return TokenStream::from(darling::Error::from(e).write_errors());
}
};
let args = match IOMacroArgs::from_list(&attr_args) {
Ok(v) => v,
Err(e) => {
Expand Down Expand Up @@ -155,13 +161,8 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
};
let cache_name = cache_ident.to_string();

let (cache_key_ty, key_convert_block) = make_cache_key_type(
&args.key,
&args.convert,
&args.cache_type,
input_tys,
&input_names,
);
let (cache_key_ty, key_convert_block) =
make_cache_key_type(&args.key, &args.convert, &args.ty, input_tys, &input_names);

// make the cache type and create statement
let (cache_ty, cache_create) = match (
Expand All @@ -170,16 +171,15 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
&args.time,
&args.time_refresh,
&args.cache_prefix_block,
&args.cache_type,
&args.cache_create,
&args.ty,
&args.create,
) {
// redis
(true, false, time, time_refresh, cache_prefix, cache_type, cache_create) => {
let cache_ty = match cache_type {
Some(cache_type) => {
let cache_type =
parse_str::<Type>(cache_type).expect("unable to parse cache type");
quote! { #cache_type }
(true, false, time, time_refresh, cache_prefix, ty, cache_create) => {
let cache_ty = match ty {
Some(ty) => {
let ty = parse_str::<Type>(ty).expect("unable to parse cache type");
quote! { #ty }
}
None => {
if asyncness.is_some() {
Expand Down Expand Up @@ -242,12 +242,11 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
(cache_ty, cache_create)
}
// disk
(false, true, time, time_refresh, _, cache_type, cache_create) => {
let cache_ty = match cache_type {
Some(cache_type) => {
let cache_type =
parse_str::<Type>(cache_type).expect("unable to parse cache type");
quote! { #cache_type }
(false, true, time, time_refresh, _, ty, cache_create) => {
let cache_ty = match ty {
Some(ty) => {
let ty = parse_str::<Type>(ty).expect("unable to parse cache type");
quote! { #ty }
}
None => {
// https://github.com/spacejam/sled?tab=readme-ov-file#interaction-with-async
Expand Down Expand Up @@ -297,14 +296,13 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
};
(cache_ty, cache_create)
}
(_, _, time, time_refresh, cache_prefix, cache_type, cache_create) => {
let cache_ty = match cache_type {
Some(cache_type) => {
let cache_type =
parse_str::<Type>(cache_type).expect("unable to parse cache type");
quote! { #cache_type }
(_, _, time, time_refresh, cache_prefix, ty, cache_create) => {
let cache_ty = match ty {
Some(ty) => {
let ty = parse_str::<Type>(ty).expect("unable to parse cache type");
quote! { #ty }
}
None => panic!("#[io_cached] cache `type` must be specified"),
None => panic!("#[io_cached] cache `ty` must be specified"),
};
let cache_create = match cache_create {
Some(cache_create) => {
Expand Down
16 changes: 8 additions & 8 deletions cached_proc_macro/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,17 +14,17 @@ use proc_macro::TokenStream;
/// - `time`: (optional, u64) specify a cache TTL in seconds, implies the cache type is a `TimedCache` or `TimedSizedCache`.
/// - `time_refresh`: (optional, bool) specify whether to refresh the TTL on cache hits.
/// - `sync_writes`: (optional, bool) specify whether to synchronize the execution of writing of uncached values.
/// - `type`: (optional, string type) The cache store type to use. Defaults to `UnboundCache`. When `unbound` is
/// - `ty`: (optional, string type) The cache store type to use. Defaults to `UnboundCache`. When `unbound` is
/// specified, defaults to `UnboundCache`. When `size` is specified, defaults to `SizedCache`.
/// When `time` is specified, defaults to `TimedCache`.
/// When `size` and `time` are specified, defaults to `TimedSizedCache`. When `type` is
/// When `size` and `time` are specified, defaults to `TimedSizedCache`. When `ty` is
/// specified, `create` must also be specified.
/// - `create`: (optional, string expr) specify an expression used to create a new cache store, e.g. `create = r##"{ CacheType::new() }"##`.
/// - `key`: (optional, string type) specify what type to use for the cache key, e.g. `key = "u32"`.
/// When `key` is specified, `convert` must also be specified.
/// - `convert`: (optional, string expr) specify an expression used to convert function arguments to a cache
/// key, e.g. `convert = r##"{ format!("{}:{}", arg1, arg2) }"##`. When `convert` is specified,
/// `key` or `type` must also be set.
/// `key` or `ty` must also be set.
/// - `result`: (optional, bool) If your function returns a `Result`, only cache `Ok` values returned by the function.
/// - `option`: (optional, bool) If your function returns an `Option`, only cache `Some` values returned by the function.
/// - `with_cached_flag`: (optional, bool) If your function returns a `cached::Return` or `Result<cached::Return, E>`,
Expand All @@ -35,7 +35,7 @@ use proc_macro::TokenStream;
/// *Note*, this option requires the cache type implements `CloneCached`.
///
/// ## Note
/// The `type`, `create`, `key`, and `convert` attributes must be in a `String`
/// The `ty`, `create`, `key`, and `convert` attributes must be in a `String`
/// This is because darling, which is used for parsing the attributes, does not support directly parsing
/// attributes into `Type`s or `Block`s.
#[proc_macro_attribute]
Expand Down Expand Up @@ -71,23 +71,23 @@ pub fn once(args: TokenStream, input: TokenStream) -> TokenStream {
/// - `disk`: (optional, bool) use a `DiskCache`, this must be set to true even if `ty` and `create` are specified.
/// - `time`: (optional, u64) specify a cache TTL in seconds, implies the cache type is a `TimedCache` or `TimedSizedCache`.
/// - `time_refresh`: (optional, bool) specify whether to refresh the TTL on cache hits.
/// - `type`: (optional, string type) explicitly specify the cache store type to use.
/// - `ty`: (optional, string type) explicitly specify the cache store type to use.
/// - `cache_prefix_block`: (optional, string expr) specify an expression used to create the string used as a
/// prefix for all cache keys of this function, e.g. `cache_prefix_block = r##"{ "my_prefix" }"##`.
/// When not specified, the cache prefix will be constructed from the name of the function. This
/// could result in unexpected conflicts between io_cached-functions of the same name, so it's
/// recommended that you specify a prefix you're sure will be unique.
/// - `create`: (optional, string expr) specify an expression used to create a new cache store, e.g. `create = r##"{ CacheType::new() }"##`.
/// - `key`: (optional, string type) specify what type to use for the cache key, e.g. `type = "TimedCached<u32, u32>"`.
/// - `key`: (optional, string type) specify what type to use for the cache key, e.g. `key = "u32"`.
/// When `key` is specified, `convert` must also be specified.
/// - `convert`: (optional, string expr) specify an expression used to convert function arguments to a cache
/// key, e.g. `convert = r##"{ format!("{}:{}", arg1, arg2) }"##`. When `convert` is specified,
/// `key` or `type` must also be set.
/// `key` or `ty` must also be set.
/// - `with_cached_flag`: (optional, bool) If your function returns a `cached::Return` or `Result<cached::Return, E>`,
/// the `cached::Return.was_cached` flag will be updated when a cached value is returned.
///
/// ## Note
/// The `type`, `create`, `key`, and `convert` attributes must be in a `String`
/// The `ty`, `create`, `key`, and `convert` attributes must be in a `String`
/// This is because darling, which is used for parsing the attributes, does not support directly parsing
/// attributes into `Type`s or `Block`s.
#[proc_macro_attribute]
Expand Down
Loading

0 comments on commit fca60ec

Please sign in to comment.