Sync some part of the cache types (cached, io_cached and once) #218

Closed
wants to merge 2 commits

Changes from all commits
24 changes: 19 additions & 5 deletions Cargo.toml
@@ -23,14 +23,28 @@ async = ["futures", "tokio", "async-trait"]
async_tokio_rt_multi_thread = ["async", "tokio/rt-multi-thread"]
redis_store = ["redis", "r2d2", "serde", "serde_json"]
redis_connection_manager = ["redis_store", "redis/connection-manager"]
redis_async_std = ["redis_store", "async", "redis/aio", "redis/async-std-comp", "redis/tls", "redis/async-std-tls-comp"]
redis_tokio = ["redis_store", "async", "redis/aio", "redis/tokio-comp", "redis/tls", "redis/tokio-native-tls-comp"]
redis_async_std = [
"redis_store",
"async",
"redis/aio",
"redis/async-std-comp",
"redis/tls",
"redis/async-std-tls-comp",
]
redis_tokio = [
"redis_store",
"async",
"redis/aio",
"redis/tokio-comp",
"redis/tls",
"redis/tokio-native-tls-comp",
]
redis_ahash = ["redis_store", "redis/ahash"]
disk_store = ["sled", "serde", "rmp-serde", "directories"]
wasm = []

[dependencies.cached_proc_macro]
version = "0.22.0"
version = "0.23.0"
path = "cached_proc_macro"
optional = true

@@ -65,7 +79,7 @@ version = "0.1"
optional = true

[dependencies.redis]
version = "0.25"
version = "0.26"
features = ["r2d2"]
optional = true

@@ -104,7 +118,7 @@ version = "^1.1.0"

[dev-dependencies]
copy_dir = "0.1.3"
googletest = "0.11.0"
googletest = "0.12.0"
tempfile = "3.10.1"

[dev-dependencies.async-std]
2 changes: 1 addition & 1 deletion cached_proc_macro/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "cached_proc_macro"
version = "0.22.0"
version = "0.23.0"
authors = ["csos95 <csoscss@gmail.com>", "James Kominick <james@kominick.com>"]
description = "Generic cache implementations and simplified function memoization"
repository = "https://github.com/jaemk/cached"
1 change: 1 addition & 0 deletions cached_proc_macro/src/cached.rs
@@ -314,6 +314,7 @@ pub fn cached(args: TokenStream, input: TokenStream) -> TokenStream {
#ty
// No cache function (origin of the cached function)
#[doc = #no_cache_fn_indent_doc]
#(#attributes)*
#visibility #function_no_cache
// Cached function
#(#attributes)*
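The one-line addition above forwards the attributes carried on the annotated function onto the generated `*_no_cache` origin function as well, not only onto the cached wrapper. A minimal sketch of the user-visible effect (function name and lint are illustrative, not from this PR):

```rust
use cached::proc_macro::cached;

// With the change above, the `#[allow]` attribute is emitted on both the cached
// wrapper `fib` and the generated `fib_no_cache` origin function, instead of
// only on the wrapper.
#[cached]
#[allow(clippy::missing_docs_in_private_items)]
fn fib(n: u64) -> u64 {
    if n <= 1 {
        n
    } else {
        fib(n - 1) + fib(n - 2)
    }
}
```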
239 changes: 124 additions & 115 deletions cached_proc_macro/src/io_cached.rs
@@ -68,9 +68,9 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
let inputs = signature.inputs.clone();
let output = signature.output.clone();
let asyncness = signature.asyncness;
let generics = signature.generics.clone();

let input_tys = get_input_types(&inputs);

let input_names = get_input_names(&inputs);

// pull out the output type
@@ -290,47 +290,32 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
}
}
None => {
let create = quote! {
let mut create = quote! {
cached::DiskCache::new(#cache_name)
};
let create = match time {
None => create,
Some(time) => {
quote! {
(#create).set_lifespan(#time)
}
}
};
let create = match time_refresh {
None => create,
Some(time_refresh) => {
quote! {
(#create).set_refresh(#time_refresh)
}
}
};
let create = match sync_to_disk_on_cache_change {
None => create,
Some(sync_to_disk_on_cache_change) => {
quote! {
(#create).set_sync_to_disk_on_cache_change(#sync_to_disk_on_cache_change)
}
}
if let Some(time) = time {
create = quote! {
(#create).set_lifespan(#time)
};
};
let create = match connection_config {
None => create,
Some(connection_config) => {
quote! {
(#create).set_connection_config(#connection_config)
}
}
if let Some(time_refresh) = time_refresh {
create = quote! {
(#create).set_refresh(#time_refresh)
};
}
if let Some(sync_to_disk_on_cache_change) = sync_to_disk_on_cache_change {
create = quote! {
(#create).set_sync_to_disk_on_cache_change(#sync_to_disk_on_cache_change)
};
};
let create = match args.disk_dir {
None => create,
Some(disk_dir) => {
quote! { (#create).set_disk_directory(#disk_dir) }
}
if let Some(connection_config) = connection_config {
create = quote! {
(#create).set_connection_config(#connection_config)
};
};
if let Some(disk_dir) = args.disk_dir {
create = quote! { (#create).set_disk_directory(#disk_dir) };
}
quote! { (#create).build().expect("error constructing DiskCache in #[io_cached] macro") }
}
};
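The rewrite above replaces the chain of `let create = match … { None => create, Some(x) => quote! { … } }` rebindings with a single mutable token stream extended by `if let` blocks. A standalone sketch of that pattern, assuming the `quote` and `proc-macro2` crates (the function and argument names here are illustrative, not the macro's own):

```rust
use proc_macro2::TokenStream;
use quote::quote;

// Build the DiskCache constructor tokens, appending a builder call only for the
// options that were actually supplied, as the refactored branch above does.
fn disk_cache_create(cache_name: &str, time: Option<u64>, time_refresh: Option<bool>) -> TokenStream {
    let mut create = quote! { cached::DiskCache::new(#cache_name) };
    if let Some(time) = time {
        create = quote! { (#create).set_lifespan(#time) };
    }
    if let Some(time_refresh) = time_refresh {
        create = quote! { (#create).set_refresh(#time_refresh) };
    }
    quote! { (#create).build().expect("error constructing DiskCache in #[io_cached] macro") }
}
```

For example, `disk_cache_create("EXAMPLE", Some(60), None)` yields tokens equivalent to `(cached::DiskCache::new("EXAMPLE")).set_lifespan(60u64).build().expect("error constructing DiskCache in #[io_cached] macro")`.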
@@ -407,26 +392,13 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
(set_cache_block, return_cache_block)
};

let do_set_return_block = if asyncness.is_some() {
quote! {
// run the function and cache the result
async fn inner(#inputs) #output #body;
let result = inner(#(#input_names),*).await;
let cache = &#cache_ident.get_or_init(init).await;
#set_cache_block
result
}
} else {
quote! {
// run the function and cache the result
fn inner(#inputs) #output #body;
let result = inner(#(#input_names),*);
let cache = &#cache_ident;
#set_cache_block
result
}
let set_cache_and_return = quote! {
#set_cache_block
result
};

let no_cache_fn_ident = Ident::new(&format!("{}_no_cache", &fn_ident), fn_ident.span());

let signature_no_muts = get_mut_signature(signature);

// create a signature for the cache-priming function
@@ -436,23 +408,14 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {

// make cached static, cached function and prime cached function doc comments
let cache_ident_doc = format!("Cached static for the [`{}`] function.", fn_ident);
let no_cache_fn_indent_doc = format!("Origin of the cached function [`{}`].", fn_ident);
let prime_fn_indent_doc = format!("Primes the cached function [`{}`].", fn_ident);
let cache_fn_doc_extra = format!(
"This is a cached function that uses the [`{}`] cached static.",
cache_ident
);
fill_in_attributes(&mut attributes, cache_fn_doc_extra);

let async_trait = if asyncness.is_some() && !args.disk {
quote! {
use cached::IOCachedAsync;
}
} else {
quote! {
use cached::IOCached;
}
};

let async_cache_get_return = if asyncness.is_some() && !args.disk {
quote! {
if let Some(result) = cache.cache_get(&key).await.map_err(#map_error)? {
@@ -466,62 +429,108 @@ pub fn io_cached(args: TokenStream, input: TokenStream) -> TokenStream {
}
}
};
// put it all together
let expanded = if asyncness.is_some() {
quote! {
// Cached static
#[doc = #cache_ident_doc]

let use_trait = if asyncness.is_some() && !args.disk {
quote! { use cached::IOCachedAsync; }
} else {
quote! { use cached::IOCached; }
};

let init;
let function_no_cache;
let function_call;
let ty;
let logic;
if asyncness.is_some() {
init = quote! {
let init = || async { #cache_create };
};

function_no_cache = quote! {
async fn #no_cache_fn_ident #generics (#inputs) #output #body
};

function_call = quote! {
let result = #no_cache_fn_ident(#(#input_names),*).await;
};

ty = quote! {
#visibility static #cache_ident: ::cached::async_sync::OnceCell<#cache_ty> = ::cached::async_sync::OnceCell::const_new();
// Cached function
#(#attributes)*
#visibility #signature_no_muts {
let init = || async { #cache_create };
#async_trait
let key = #key_convert_block;
{
// check if the result is cached
let cache = &#cache_ident.get_or_init(init).await;
#async_cache_get_return
}
#do_set_return_block
}
// Prime cached function
#[doc = #prime_fn_indent_doc]
#[allow(dead_code)]
#visibility #prime_sig {
#async_trait
let init = || async { #cache_create };
let key = #key_convert_block;
#do_set_return_block
};

logic = quote! {
let cache = &#cache_ident.get_or_init(init).await;
#async_cache_get_return
};
} else {
init = quote! {};

function_no_cache = quote! {
fn #no_cache_fn_ident #generics (#inputs) #output #body
};

function_call = quote! {
let result = #no_cache_fn_ident(#(#input_names),*);
};

ty = quote! {
#visibility static #cache_ident: ::cached::once_cell::sync::Lazy<#cache_ty> = ::cached::once_cell::sync::Lazy::new(|| #cache_create);
};

logic = quote! {
let cache = &#cache_ident;
if let Some(result) = cache.cache_get(&key).map_err(#map_error)? {
#return_cache_block
}
};
}

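For context, the async branch above relies on lazily initializing the cache static the first time the wrapper runs, because the cache constructor may itself be async. A minimal standalone sketch of that pattern, written against `tokio::sync::OnceCell` directly (the macro uses `cached::async_sync::OnceCell`, assumed here to be the equivalent re-export; names and values are illustrative, and tokio's `macros` and `rt-multi-thread` features are assumed):

```rust
use tokio::sync::OnceCell;

// The cache static starts empty and is built by an async closure the first time
// `get_or_init` is awaited; later calls reuse the stored value.
static CACHE: OnceCell<u64> = OnceCell::const_new();

async fn build_cache() -> u64 {
    // Stand-in for an async cache constructor (e.g. a store that must connect first).
    42
}

#[tokio::main]
async fn main() {
    let init = || async { build_cache().await };
    let cache = CACHE.get_or_init(init).await;
    println!("cache value: {cache}");
}
```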
let do_set_return_block = if asyncness.is_some() {
quote! {
// run the function and cache the result
#function_call
let cache = &#cache_ident.get_or_init(init).await;
#set_cache_and_return
}
} else {
quote! {
// Cached static
#[doc = #cache_ident_doc]
#visibility static #cache_ident: ::cached::once_cell::sync::Lazy<#cache_ty> = ::cached::once_cell::sync::Lazy::new(|| #cache_create);
// Cached function
#(#attributes)*
#visibility #signature_no_muts {
use cached::IOCached;
let key = #key_convert_block;
{
// check if the result is cached
let cache = &#cache_ident;
if let Some(result) = cache.cache_get(&key).map_err(#map_error)? {
#return_cache_block
}
}
#do_set_return_block
}
// Prime cached function
#[doc = #prime_fn_indent_doc]
#[allow(dead_code)]
#visibility #prime_sig {
use cached::IOCached;
let key = #key_convert_block;
#do_set_return_block
// run the function and cache the result
#function_call
let cache = &#cache_ident;
#set_cache_and_return
}
};

// put it all together
let expanded = quote! {
// Cached static
#[doc = #cache_ident_doc]
#ty
// No cache function (origin of the cached function)
#[doc = #no_cache_fn_indent_doc]
#(#attributes)*
#visibility #function_no_cache
// Cached function
#(#attributes)*
#visibility #signature_no_muts {
#init
#use_trait
let key = #key_convert_block;
{
// check if the result is cached
#logic
}
#do_set_return_block
}
// Prime cached function
#[doc = #prime_fn_indent_doc]
#[allow(dead_code)]
#(#attributes)*
#visibility #prime_sig {
#use_trait
#init
let key = #key_convert_block;
#do_set_return_block
}
};

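Taken together, the unified expansion above emits, for one annotated function, a cache static, an attribute-carrying `*_no_cache` origin function, the cached wrapper, and a cache-priming helper. A hedged usage sketch of the disk-backed flavor, following the crate's documented `disk = true` form (the error type, names, and the `thiserror` dependency are illustrative; the `disk_store` feature from the Cargo.toml above is assumed to be enabled):

```rust
use cached::proc_macro::io_cached;
use thiserror::Error;

#[derive(Error, Debug, PartialEq, Clone)]
enum ExampleError {
    #[error("disk cache error `{0}`")]
    DiskError(String),
}

// Results are cached on disk for 30 seconds. Besides this wrapper, the macro also
// generates `slow_lookup_no_cache` (the original body, now carrying the same
// attributes) and a cache-priming variant that runs the body and stores the result.
#[io_cached(
    disk = true,
    time = 30,
    map_error = r##"|e| ExampleError::DiskError(format!("{:?}", e))"##
)]
fn slow_lookup(name: String) -> Result<u64, ExampleError> {
    std::thread::sleep(std::time::Duration::from_millis(250));
    Ok(name.len() as u64)
}
```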