Skip to content

Commit

Permalink
Merge branch 'master' into cache-remove-expiration
Browse files Browse the repository at this point in the history
  • Loading branch information
jaemk authored Apr 26, 2024
2 parents 05c9510 + fdbfd57 commit 8d065d2
Show file tree
Hide file tree
Showing 18 changed files with 757 additions and 192 deletions.
14 changes: 14 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,21 @@

## [Unreleased]
## Added
- Add `DiskCacheBuilder::set_sync_to_disk_on_cache_change` to specify that the cache changes should be written to disk on every cache change.
- Add `sync_to_disk_on_cache_change` to `#[io_cached]` to allow setting `DiskCacheBuilder::set_sync_to_disk_on_cache_change` from the proc macro.
- Add `DiskCacheBuilder::set_connection_config` to give more control over the sled connection.
- Add `connection_config` to `#[io_cached]` to allow setting `DiskCacheBuilder::set_connection_config` from the proc macro.
- Add `DiskCache::connection()` and `DiskCache::connection_mut()` to give access to the underlying sled connection.
## Changed
- [Breaking] `type` attribute is now `ty`
- Upgrade to syn2
- Signature of `DiskCache::remove_expired_entries`: it now returns `Result<(), DiskCacheError>` instead of `()`, returning an `Err(sled::Error)` if removing entries from or flushing the connection fails.
## Removed

## [0.49.3]
## Added
## Changed
- Fix `DiskCache` expired value logic
## Removed

## [0.49.2]
Expand Down
13 changes: 9 additions & 4 deletions Cargo.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[package]
name = "cached"
version = "0.49.2"
version = "0.49.3"
authors = ["James Kominick <james@kominick.com>"]
description = "Generic cache implementations and simplified function memoization"
repository = "https://github.com/jaemk/cached"
Expand Down Expand Up @@ -65,7 +65,7 @@ version = "0.1"
optional = true

[dependencies.redis]
version = "0.24"
version = "0.25"
features = ["r2d2"]
optional = true

Expand All @@ -78,7 +78,7 @@ version = "1.1"
optional = true

[dependencies.directories]
version = "4.0"
version = "5.0"
optional = true

[dependencies.r2d2]
Expand All @@ -102,6 +102,11 @@ optional = true
[dependencies.instant]
version = "0.1"

[dev-dependencies]
copy_dir = "0.1.3"
googletest = "0.11.0"
tempfile = "3.10.1"

[dev-dependencies.async-std]
version = "1.6"
features = ["attributes"]
Expand All @@ -110,7 +115,7 @@ features = ["attributes"]
version = "1"

[dev-dependencies.serial_test]
version = "2"
version = "3"

[workspace]
members = ["cached_proc_macro", "examples/wasm"]
Expand Down
20 changes: 18 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ use cached::SizedCache;

/// Use an explicit cache-type with a custom creation block and custom cache-key generating block
#[cached(
type = "SizedCache<String, usize>",
ty = "SizedCache<String, usize>",
create = "{ SizedCache::with_size(100) }",
convert = r#"{ format!("{}{}", a, b) }"#
)]
Expand Down Expand Up @@ -105,6 +105,22 @@ fn keyed(a: String) -> Option<usize> {

----

```compile_fail
use cached::proc_macro::cached;
/// Cannot use sync_writes and result_fallback together
#[cached(
result = true,
time = 1,
sync_writes = true,
result_fallback = true
)]
fn doesnt_compile() -> Result<String, ()> {
Ok("a".to_string())
}
```
----

```rust,no_run,ignore
use cached::proc_macro::io_cached;
use cached::AsyncRedisCache;
Expand All @@ -123,7 +139,7 @@ enum ExampleError {
/// by your function. All `io_cached` functions must return `Result`s.
#[io_cached(
map_error = r##"|e| ExampleError::RedisError(format!("{:?}", e))"##,
type = "AsyncRedisCache<u64, String>",
ty = "AsyncRedisCache<u64, String>",
create = r##" {
AsyncRedisCache::new("cached_redis_prefix", 1)
.set_refresh(true)
Expand Down
7 changes: 2 additions & 5 deletions cached_proc_macro/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,6 @@ proc-macro = true

[dependencies]
quote = "1.0.6"
darling = "0.14.2"
darling = "0.20.8"
proc-macro2 = "1.0.49"

[dependencies.syn]
version = "1.0.27"
features = ["full"]
syn = "2.0.52"
59 changes: 32 additions & 27 deletions cached_proc_macro/src/cached.rs
Original file line number Diff line number Diff line change
@@ -1,9 +1,10 @@
use crate::helpers::*;
use darling::ast::NestedMeta;
use darling::FromMeta;
use proc_macro::TokenStream;
use quote::quote;
use syn::spanned::Spanned;
use syn::{parse_macro_input, parse_str, AttributeArgs, Block, Ident, ItemFn, ReturnType, Type};
use syn::{parse_macro_input, parse_str, Block, Ident, ItemFn, ReturnType, Type};

#[derive(FromMeta)]
struct MacroArgs {
Expand All @@ -29,16 +30,21 @@ struct MacroArgs {
sync_writes: bool,
#[darling(default)]
with_cached_flag: bool,
#[darling(default, rename = "type")]
cache_type: Option<String>,
#[darling(default, rename = "create")]
cache_create: Option<String>,
#[darling(default)]
ty: Option<String>,
#[darling(default)]
create: Option<String>,
#[darling(default)]
result_fallback: bool,
}

pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let attr_args = parse_macro_input!(args as AttributeArgs);
let attr_args = match NestedMeta::parse_meta_list(args.into()) {
Ok(v) => v,
Err(e) => {
return TokenStream::from(darling::Error::from(e).write_errors());
}
};
let args = match MacroArgs::from_list(&attr_args) {
Ok(v) => v,
Err(e) => {
Expand Down Expand Up @@ -86,21 +92,16 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
None => Ident::new(&fn_ident.to_string().to_uppercase(), fn_ident.span()),
};

let (cache_key_ty, key_convert_block) = make_cache_key_type(
&args.key,
&args.convert,
&args.cache_type,
input_tys,
&input_names,
);
let (cache_key_ty, key_convert_block) =
make_cache_key_type(&args.key, &args.convert, &args.ty, input_tys, &input_names);

// make the cache type and create statement
let (cache_ty, cache_create) = match (
&args.unbound,
&args.size,
&args.time,
&args.cache_type,
&args.cache_create,
&args.ty,
&args.create,
&args.time_refresh,
) {
(true, None, None, None, None, _) => {
Expand Down Expand Up @@ -130,12 +131,12 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
(cache_ty, cache_create)
}
(false, None, None, Some(type_str), Some(create_str), _) => {
let cache_type = parse_str::<Type>(type_str).expect("unable to parse cache type");
let ty = parse_str::<Type>(type_str).expect("unable to parse cache type");

let cache_create =
parse_str::<Block>(create_str).expect("unable to parse cache create block");

(quote! { #cache_type }, quote! { #cache_create })
(quote! { #ty }, quote! { #cache_create })
}
(false, None, None, Some(_), None, _) => {
panic!("type requires create to also be set")
Expand All @@ -153,9 +154,9 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
(false, false) => {
let set_cache_block = quote! { cache.cache_set(key, result.clone()); };
let return_cache_block = if args.with_cached_flag {
quote! { let mut r = result.clone(); r.was_cached = true; return r }
quote! { let mut r = result.to_owned(); r.was_cached = true; return r }
} else {
quote! { return result.clone() }
quote! { return result.to_owned() }
};
(set_cache_block, return_cache_block)
}
Expand All @@ -166,9 +167,9 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
}
};
let return_cache_block = if args.with_cached_flag {
quote! { let mut r = result.clone(); r.was_cached = true; return Ok(r) }
quote! { let mut r = result.to_owned(); r.was_cached = true; return Ok(r) }
} else {
quote! { return Ok(result.clone()) }
quote! { return Ok(result.to_owned()) }
};
(set_cache_block, return_cache_block)
}
Expand All @@ -179,7 +180,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
}
};
let return_cache_block = if args.with_cached_flag {
quote! { let mut r = result.clone(); r.was_cached = true; return Some(r) }
quote! { let mut r = result.to_owned(); r.was_cached = true; return Some(r) }
} else {
quote! { return Some(result.clone()) }
};
Expand All @@ -188,6 +189,10 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
_ => panic!("the result and option attributes are mutually exclusive"),
};

if args.result_fallback && args.sync_writes {
panic!("the result_fallback and sync_writes attributes are mutually exclusive");
}

let set_cache_and_return = quote! {
#set_cache_block
result
Expand All @@ -198,7 +203,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let lock;
let function_no_cache;
let function_call;
let cache_type;
let ty;
if asyncness.is_some() {
lock = quote! {
let mut cache = #cache_ident.lock().await;
Expand All @@ -212,7 +217,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let result = #no_cache_fn_ident(#(#input_names),*).await;
};

cache_type = quote! {
ty = quote! {
#visibility static #cache_ident: ::cached::once_cell::sync::Lazy<::cached::async_sync::Mutex<#cache_ty>> = ::cached::once_cell::sync::Lazy::new(|| ::cached::async_sync::Mutex::new(#cache_create));
};
} else {
Expand All @@ -228,7 +233,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let result = #no_cache_fn_ident(#(#input_names),*);
};

cache_type = quote! {
ty = quote! {
#visibility static #cache_ident: ::cached::once_cell::sync::Lazy<std::sync::Mutex<#cache_ty>> = ::cached::once_cell::sync::Lazy::new(|| std::sync::Mutex::new(#cache_create));
};
}
Expand All @@ -255,7 +260,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let old_val = {
#lock
let (result, has_expired) = cache.cache_get_expired(&key);
if let (Some(result), false) = (result, has_expired) {
if let (Some(result), false) = (&result, has_expired) {
#return_cache_block
}
result
Expand Down Expand Up @@ -305,7 +310,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
let expanded = quote! {
// Cached static
#[doc = #cache_ident_doc]
#cache_type
#ty
// No cache function (origin of the cached function)
#[doc = #no_cache_fn_indent_doc]
#visibility #function_no_cache
Expand Down
6 changes: 3 additions & 3 deletions cached_proc_macro/src/helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,11 +96,11 @@ pub(super) fn find_value_type(
pub(super) fn make_cache_key_type(
key: &Option<String>,
convert: &Option<String>,
cache_type: &Option<String>,
ty: &Option<String>,
input_tys: Vec<Type>,
input_names: &Vec<Pat>,
) -> (TokenStream2, TokenStream2) {
match (key, convert, cache_type) {
match (key, convert, ty) {
(Some(key_str), Some(convert_str), _) => {
let cache_key_ty = parse_str::<Type>(key_str).expect("unable to parse cache key type");

Expand Down Expand Up @@ -145,7 +145,7 @@ pub(super) fn get_input_names(inputs: &Punctuated<FnArg, Comma>) -> Vec<Pat> {
}

pub(super) fn fill_in_attributes(attributes: &mut Vec<Attribute>, cache_fn_doc_extra: String) {
if attributes.iter().any(|attr| attr.path.is_ident("doc")) {
if attributes.iter().any(|attr| attr.path().is_ident("doc")) {
attributes.push(parse_quote! { #[doc = ""] });
attributes.push(parse_quote! { #[doc = "# Caching"] });
attributes.push(parse_quote! { #[doc = #cache_fn_doc_extra] });
Expand Down
Loading

0 comments on commit 8d065d2

Please sign in to comment.