Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 8 additions & 2 deletions .github/workflows/rust.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,5 +23,11 @@ jobs:
- uses: actions/checkout@v2
- name: Build
run: cargo build
- name: Run tests
run: cargo test --features "ttl-cache","lru-cache"
- name: Run tests no-features
run: cargo test
- name: Run tests lru-feature
run: cargo test --features lru-cache
- name: Run tests ttl-feature
run: cargo test --features ttl-cache
- name: Run tests all-features
run: cargo test --features lru-cache,ttl-cache
3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,9 @@ thiserror = "1.0"
# Optional feature based dependencies
lru = { version = "0.6.5", optional = true }

[dev-dependencies]
cache_loader_async_macros = { path = "./cache-loader-async-macros" }

[features]
default = []
lru-cache = ["lru"]
Expand Down
14 changes: 14 additions & 0 deletions cache-loader-async-macros/Cargo.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
[package]
name = "cache_loader_async_macros"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
proc-macro = true

[dependencies]
syn = { version = "1.0", features = ["full"] }
quote = "1.0"
proc-macro2 = "1.0"
113 changes: 113 additions & 0 deletions cache-loader-async-macros/src/lib.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
extern crate proc_macro;

use quote::quote;
use proc_macro2::{Ident, Span, TokenStream, TokenTree};
use proc_macro2::token_stream::IntoIter;

/// Parses a diamond-delimited ident list, e.g. `<K, V, E>`, from the front of
/// the token stream and returns the idents in order.
///
/// Panics (proc-macro convention) when the stream does not start with `<`,
/// when idents/commas alternate incorrectly, or when a non-ident/non-punct
/// token appears inside the diamond.
fn collect_diamond_idents(stream: &mut IntoIter) -> Vec<Ident> {
    // The diamond must open with a literal '<'.
    match stream.next().expect("Missing next element") {
        TokenTree::Punct(punct) if punct.as_char() == '<' => {}
        _ => panic!("Invalid diamond start"),
    }

    let mut idents = Vec::new();
    // Alternates: ident, ',', ident, ',', ... terminated by '>'.
    let mut expect_ident = true;
    loop {
        match stream.next().expect("Missing next element") {
            TokenTree::Ident(ident) if expect_ident => {
                idents.push(ident);
                expect_ident = false;
            }
            TokenTree::Ident(_) => panic!("Invalid diamond format! (Didn't expect ident)"),
            TokenTree::Punct(punct) if !expect_ident => match punct.as_char() {
                ',' => expect_ident = true,
                '>' => break,
                _ => panic!("Invalid diamond format! (Invalid punct)"),
            },
            TokenTree::Punct(_) => panic!("Invalid diamond format! (Didn't expect punct)"),
            _ => panic!("Invalid type"),
        }
    }
    idents
}

/// Function-like proc macro generating one async test per cache backing.
///
/// Expected input shape:
///   `<fn_suffix> <cache_ident> <Key, Value, Error> { loader-body } <test-body...>`
///
/// Emits three `#[tokio::test]` functions — `test_default_*`, `test_lru_*`
/// (gated on `lru-cache`) and `test_ttl_*` (gated on `ttl-cache`) — each
/// constructing a `LoadingCache` over the given loader and running the same
/// test body against it.
#[proc_macro]
pub fn test_with_features(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let mut stream = TokenStream::from(item).into_iter();
    // Suffix for the generated test function names.
    let fn_ident = if let TokenTree::Ident(ident) = stream.next().expect("First token mandatory") {
        ident
    } else {
        panic!("First token must be an ident!");
    };
    // Identifier the test body uses to refer to the constructed cache.
    let ident = if let TokenTree::Ident(ident) = stream.next().expect("Second token mandatory") {
        ident
    } else {
        panic!("Second token must be an ident!");
    };
    let types = collect_diamond_idents(&mut stream);
    // The braced loader body handed to the cache constructor.
    let loader = if let TokenTree::Group(group) = stream.next().expect("Missing group token") {
        group
    } else {
        panic!("Group token not present");
    };

    // Everything remaining after the loader group is the shared test body.
    let mut fn_body = quote! {};
    for token in stream {
        fn_body = quote! {
            #fn_body #token
        }
    }

    // Exactly three type idents are required: key, value and error.
    let key_type = types.get(0).expect("Diamond must contain a key type");
    let value_type = types.get(1).expect("Diamond must contain a value type");
    let error_type = types.get(2).expect("Diamond must contain an error type");

    let fn_ident_default = syn::Ident::new(&format!("test_default_{}", fn_ident), Span::call_site());
    let fn_ident_lru = syn::Ident::new(&format!("test_lru_{}", fn_ident), Span::call_site());
    let fn_ident_ttl = syn::Ident::new(&format!("test_ttl_{}", fn_ident), Span::call_site());

    let result = quote! {
        #[tokio::test]
        async fn #fn_ident_default() {
            let #ident: LoadingCache<#key_type, #value_type, #error_type> = LoadingCache::new(move |key: #key_type| {
                async move #loader
            });

            #fn_body
        }

        #[cfg(feature = "lru-cache")]
        #[tokio::test]
        async fn #fn_ident_lru() {
            let #ident: LoadingCache<#key_type, #value_type, #error_type> = LoadingCache::with_backing(LruCacheBacking::new(100), move |key: #key_type| {
                async move #loader
            });

            #fn_body
        }

        #[cfg(feature = "ttl-cache")]
        #[tokio::test]
        async fn #fn_ident_ttl() {
            let #ident: LoadingCache<#key_type, #value_type, #error_type> = LoadingCache::with_backing(TtlCacheBacking::new(Duration::from_secs(3)), move |key: #key_type| {
                async move #loader
            });

            #fn_body
        }
    };

    result.into()
}
118 changes: 99 additions & 19 deletions src/backing.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ pub trait CacheBacking<K, V>

#[cfg(feature = "lru-cache")]
pub struct LruCacheBacking<K, V> {
lru: LruCache<K, V>
lru: LruCache<K, V>,
}

#[cfg(feature = "lru-cache")]
Expand Down Expand Up @@ -93,10 +93,27 @@ impl<
#[cfg(feature = "ttl-cache")]
pub struct TtlCacheBacking<K, V> {
ttl: Duration,
expiry_queue: VecDeque<(K, Instant)>,
expiry_queue: VecDeque<TTlEntry<K>>,
map: HashMap<K, (V, Instant)>,
}

/// Queue record pairing a cache key with its absolute expiry instant; the
/// TTL backing keeps these sorted by `expiry` in its `expiry_queue`.
#[cfg(feature = "ttl-cache")]
struct TTlEntry<K> {
    key: K,
    expiry: Instant,
}

#[cfg(feature = "ttl-cache")]
impl<K> From<(K, Instant)> for TTlEntry<K> {
    /// Builds an entry directly from a `(key, expiry)` pair.
    fn from((key, expiry): (K, Instant)) -> Self {
        Self { key, expiry }
    }
}

#[cfg(feature = "ttl-cache")]
impl<
K: Eq + Hash + Sized + Clone + Send,
Expand All @@ -117,21 +134,21 @@ impl<
fn set(&mut self, key: K, value: V) -> Option<V> {
self.remove_old();
let expiry = Instant::now().add(self.ttl);
let option = self.map.insert(key.clone(), (value, expiry));
if option.is_some() {
self.expiry_queue.retain(|(vec_key, _)| vec_key.ne(&key));
let result = self.replace(key.clone(), value, expiry);
match self.expiry_queue.binary_search_by_key(&expiry, |entry| entry.expiry) {
Ok(found) => {
self.expiry_queue.insert(found + 1, (key, expiry).into());
}
Err(idx) => {
self.expiry_queue.insert(idx, (key, expiry).into());
}
}
self.expiry_queue.push_back((key, expiry));
option.map(|(value, _)| value)
result
}

fn remove(&mut self, key: &K) -> Option<V> {
self.remove_old();
let option = self.map.remove(key);
if option.is_some() {
self.expiry_queue.retain(|(vec_key, _)| vec_key.ne(&key));
}
option.map(|(value, _)| value)
self.remove_key(key)
}

fn contains_key(&self, key: &K) -> bool {
Expand All @@ -154,7 +171,8 @@ impl<
.collect::<Vec<K>>();
for key in keys.into_iter() {
self.map.remove(&key);
self.expiry_queue.retain(|(expiry_key, _)| expiry_key.ne(&key))
// optimize looping through expiry_queue multiple times?
self.expiry_queue.retain(|entry| entry.key.ne(&key))
}
}

Expand All @@ -165,7 +183,7 @@ impl<
}

#[cfg(feature = "ttl-cache")]
impl<K: Hash + Sized + PartialEq + Eq, V> TtlCacheBacking<K, V> {
impl<K: Eq + Hash + Sized + Clone + Send, V: Sized + Clone + Send> TtlCacheBacking<K, V> {
pub fn new(ttl: Duration) -> TtlCacheBacking<K, V> {
TtlCacheBacking {
ttl,
Expand All @@ -176,18 +194,80 @@ impl<K: Hash + Sized + PartialEq + Eq, V> TtlCacheBacking<K, V> {

fn remove_old(&mut self) {
let now = Instant::now();
while let Some((key, expiry)) = self.expiry_queue.pop_front() {
if now.lt(&expiry) {
self.expiry_queue.push_front((key, expiry));
while let Some(entry) = self.expiry_queue.pop_front() {
if now.lt(&entry.expiry) {
self.expiry_queue.push_front(entry);
break;
}
self.map.remove(&key);
self.map.remove(&entry.key);
}
}

/// Inserts (or overwrites) `key` in the backing map with `value` and the
/// given expiry, then removes the now-stale entry for that key from the
/// expiry queue. Returns the previously stored value, if any.
fn replace(&mut self, key: K, value: V, expiry: Instant) -> Option<V> {
    let entry = self.map.insert(key.clone(), (value, expiry));
    // cleanup_expiry drops the old queue slot matching the replaced entry.
    self.cleanup_expiry(entry, &key)
}

/// Removes `key` from both the backing map and the expiry queue,
/// returning the stored value if the key was present.
fn remove_key(&mut self, key: &K) -> Option<V> {
    let entry = self.map.remove(key);
    self.cleanup_expiry(entry, key)
}

/// Drops the expiry-queue slot belonging to a map entry that was just
/// inserted over or removed, and unwraps the stored value.
///
/// `entry` is the `(value, old_expiry)` pair that came out of `map`;
/// `old_expiry` locates a candidate queue index via binary search (the
/// queue is kept sorted by expiry). Because several entries may share one
/// expiry instant, `expiry_index_on_key_eq` scans equal-expiry neighbours
/// for the exact key before removing.
fn cleanup_expiry(&mut self, entry: Option<(V, Instant)>, key: &K) -> Option<V> {
    if let Some((value, old_expiry)) = entry {
        match self.expiry_queue.binary_search_by_key(&old_expiry, |entry| entry.expiry) {
            Ok(found) => {
                let index = self.expiry_index_on_key_eq(found, &old_expiry, key);
                if let Some(index) = index {
                    self.expiry_queue.remove(index);
                } else {
                    // expiry not found (key)???
                    // NOTE(review): map and queue disagree here and the
                    // inconsistency is silently ignored — confirm this is
                    // actually unreachable.
                }
            }
            Err(_) => {
                // expiry not found???
                // NOTE(review): same silent map/queue inconsistency as above.
            }
        }
        Some(value)
    } else {
        None
    }
}

/// Given `idx` — an index whose queue entry has expiry `*expiry` (found by
/// binary search) — locates the entry whose key equals `key` among the run
/// of entries sharing that same expiry, scanning outward from `idx`.
/// Returns the matching index, or `None` if no equal-expiry neighbour
/// carries the key.
fn expiry_index_on_key_eq(&self, idx: usize, expiry: &Instant, key: &K) -> Option<usize> {
    // Fast path: binary search landed directly on the right key.
    let entry = self.expiry_queue.get(idx).unwrap();
    if entry.key.eq(key) {
        return Some(idx);
    }

    // Scan backwards through the equal-expiry run (down to index 0).
    let mut offset = 0;
    while idx - offset > 0 {
        offset += 1;
        let entry = self.expiry_queue.get(idx - offset).unwrap();
        if !entry.expiry.eq(expiry) {
            // Left the run of equal expiries — stop this direction.
            break;
        }
        if entry.key.eq(key) {
            return Some(idx - offset);
        }
    }
    // Scan forwards through the equal-expiry run.
    offset = 0;
    while idx + offset < self.expiry_queue.len() {
        offset += 1;
        let entry = self.expiry_queue.get(idx + offset).unwrap();
        if !entry.expiry.eq(expiry) {
            break;
        }
        if entry.key.eq(key) {
            return Some(idx + offset);
        }
    }
    None
}
}

pub struct HashMapBacking<K, V> {
map: HashMap<K, V>
map: HashMap<K, V>,
}

impl<
Expand Down
Loading