bumpup versions
al8n committed Jan 10, 2025
1 parent 07ee48a commit bc185b4
Showing 10 changed files with 71 additions and 53 deletions.
22 changes: 11 additions & 11 deletions Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "caches"
version = "0.2.9"
version = "0.3.0"
authors = ["Al Liu <scygliu1@gmail.com>"]
description = "This is a Rust implementation for popular caches (support no_std)."
homepage = "https://github.com/al8n/caches-rs"
@@ -42,18 +42,18 @@ nightly = ["rand/nightly"]

[dependencies]
bitvec = { version = "1", default-features = false }
cfg-if = "1.0.0"
hashbrown = { version = "0.14", optional = true }
libm = {version = "0.2.8", optional = true}
rand = {version = "0.8", optional = true}
cfg-if = "1"
hashbrown = { version = "0.15", optional = true }
libm = { version = "0.2", optional = true }
rand = { version = "0.8", optional = true }

[dev-dependencies]
scoped_threadpool = "0.1.*"
stats_alloc = "0.1.*"
criterion = "0.5.1"
fnv = "1.0.7"
rand = "0.8.4"
rustc-hash = "1.0"
scoped_threadpool = "0.1"
stats_alloc = "0.1"
criterion = "0.5"
fnv = "1"
rand = "0.8"
rustc-hash = "1"
cascara = "0.1.0"

[package.metadata.docs.rs]
8 changes: 6 additions & 2 deletions README.md
@@ -30,15 +30,19 @@ The MSRV for this crate is 1.55.0.
- `TinyLFU`, `SampledLFU`, and `WTinyLFUCache`

## Installation

- std

```toml
[dependencies]
caches = "0.2"
caches = "0.3"
```

- no_std

```toml
[dependencies]
caches = {version: "0.2", default-features = false }
caches = { version: "0.3", default-features = false, features = ["libm", "hashbrown"] }
```
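For a quick sanity check after switching to `0.3`, a minimal example might look like the following. This is a hedged sketch: the `Cache` trait and `LRUCache` alias at the crate root, and a fallible `new` constructor, are assumptions rather than verified 0.3 API.

```rust
use caches::{Cache, LRUCache};

fn main() {
    // Fixed-capacity LRU; the constructor is assumed to return a Result so
    // that a zero capacity can be rejected.
    let mut cache = LRUCache::new(2).unwrap();

    cache.put(1, "a");
    cache.put(2, "b");

    assert_eq!(cache.get(&1), Some(&"a"));
    assert_eq!(cache.len(), 2);
}
```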

## Usages
32 changes: 22 additions & 10 deletions benches/wtinylfu_cache.rs
@@ -59,11 +59,17 @@ fn bench_wtinylfu_cache_fx_hasher(c: &mut Criterion) {
.collect(),
);

let builder = WTinyLFUCacheBuilder::<u64, DefaultKeyHasher<u64>, BuildHasherDefault<FxHasher>, BuildHasherDefault<FxHasher>, BuildHasherDefault<FxHasher>>::new(82, 6488, 1622, 8192)
.set_window_hasher(BuildHasherDefault::<FxHasher>::default())
.set_protected_hasher(BuildHasherDefault::<FxHasher>::default())
.set_probationary_hasher(BuildHasherDefault::<FxHasher>::default())
.set_key_hasher(DefaultKeyHasher::default());
let builder = WTinyLFUCacheBuilder::<
u64,
DefaultKeyHasher<u64>,
BuildHasherDefault<FxHasher>,
BuildHasherDefault<FxHasher>,
BuildHasherDefault<FxHasher>,
>::new(82, 6488, 1622, 8192)
.set_window_hasher(BuildHasherDefault::<FxHasher>::default())
.set_protected_hasher(BuildHasherDefault::<FxHasher>::default())
.set_probationary_hasher(BuildHasherDefault::<FxHasher>::default())
.set_key_hasher(DefaultKeyHasher::default());
let l = WTinyLFUCache::from_builder(builder).unwrap();
(l, nums)
},
@@ -100,11 +106,17 @@ fn bench_wtinylfu_cache_fnv_hasher(c: &mut Criterion) {
})
.collect(),
);
let builder = WTinyLFUCacheBuilder::<u64, DefaultKeyHasher<u64>, BuildHasherDefault<fnv::FnvHasher>, BuildHasherDefault<fnv::FnvHasher>, BuildHasherDefault<fnv::FnvHasher>>::new(82, 6488, 1622, 8192)
.set_key_hasher(DefaultKeyHasher::default())
.set_window_hasher(FnvBuildHasher::default())
.set_protected_hasher(FnvBuildHasher::default())
.set_probationary_hasher(FnvBuildHasher::default());
let builder = WTinyLFUCacheBuilder::<
u64,
DefaultKeyHasher<u64>,
BuildHasherDefault<fnv::FnvHasher>,
BuildHasherDefault<fnv::FnvHasher>,
BuildHasherDefault<fnv::FnvHasher>,
>::new(82, 6488, 1622, 8192)
.set_key_hasher(DefaultKeyHasher::default())
.set_window_hasher(FnvBuildHasher::default())
.set_protected_hasher(FnvBuildHasher::default())
.set_probationary_hasher(FnvBuildHasher::default());
let l = WTinyLFUCache::from_builder(builder).unwrap();
(l, nums)
},
3 changes: 1 addition & 2 deletions src/lfu/sampled.rs
@@ -180,9 +180,8 @@ impl<K: Hash + Eq, KH: KeyHasher<K>, S: BuildHasher> SampledLFU<K, KH, S> {
/// Remove an entry from SampledLFU by hashed key
#[inline]
pub fn remove_hashed_key(&mut self, kh: u64) -> Option<i64> {
self.key_costs.remove(&kh).map(|cost| {
self.key_costs.remove(&kh).inspect(|&cost| {
self.used -= cost;
cost
})
}

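The `remove_hashed_key` change above swaps `map` for `Option::inspect` (stable since Rust 1.76): `inspect` runs a side effect on a reference to the contained value and passes the option through unchanged, so the closure no longer needs to return `cost`. A standalone sketch of the equivalence:

```rust
fn main() {
    let mut used: i64 = 10;
    let removed: Option<i64> = Some(3);

    // Equivalent to the old `.map(|cost| { used -= cost; cost })`, but the
    // closure only performs the side effect; `inspect` returns the Option as-is.
    let result = removed.inspect(|&cost| used -= cost);

    assert_eq!(result, Some(3));
    assert_eq!(used, 7);
}
```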
12 changes: 6 additions & 6 deletions src/lfu/tinylfu.rs
@@ -176,9 +176,9 @@ impl<K: Hash + Eq, KH: KeyHasher<K>> TinyLFU<K, KH> {
/// # Details
/// Explanation from [TinyLFU: A Highly Efficient Cache Admission Policy §3.4.2]:
/// - When querying items, we use both the Doorkeeper and the main structures.
/// That is, if the item is included in the Doorkeeper,
/// TinyLFU estimates the frequency of this item as its estimation in the main structure plus 1.
/// Otherwise, TinyLFU returns just the estimation from the main structure.
/// That is, if the item is included in the Doorkeeper,
/// TinyLFU estimates the frequency of this item as its estimation in the main structure plus 1.
/// Otherwise, TinyLFU returns just the estimation from the main structure.
///
/// [TinyLFU: A Highly Efficient Cache Admission Policy §3.4.2]: https://arxiv.org/pdf/1512.00727.pdf
pub fn estimate<Q>(&self, key: &Q) -> u64
@@ -199,9 +199,9 @@ impl<K: Hash + Eq, KH: KeyHasher<K>> TinyLFU<K, KH> {
/// # Details
/// Explanation from [TinyLFU: A Highly Efficient Cache Admission Policy §3.4.2]:
/// - When querying items, we use both the Doorkeeper and the main structures.
/// That is, if the item is included in the Doorkeeper,
/// TinyLFU estimates the frequency of this item as its estimation in the main structure plus 1.
/// Otherwise, TinyLFU returns just the estimation from the main structure.
/// That is, if the item is included in the Doorkeeper,
/// TinyLFU estimates the frequency of this item as its estimation in the main structure plus 1.
/// Otherwise, TinyLFU returns just the estimation from the main structure.
///
/// [TinyLFU: A Highly Efficient Cache Admission Policy §3.4.2]: https://arxiv.org/pdf/1512.00727.pdf
pub fn estimate_hashed_key(&self, kh: u64) -> u64 {
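The doorkeeper rule quoted in the doc comments above is easy to model in isolation. In the toy sketch below, a `HashSet` stands in for the doorkeeper bloom filter and a `HashMap` for the count-min sketch; both are illustrative stand-ins, not the crate's internal types.

```rust
use std::collections::{HashMap, HashSet};

// TinyLFU §3.4.2: if the hashed key is in the doorkeeper, report the main
// structure's estimate plus one; otherwise report the main estimate alone.
fn estimate(doorkeeper: &HashSet<u64>, sketch: &HashMap<u64, u64>, kh: u64) -> u64 {
    let base = sketch.get(&kh).copied().unwrap_or(0);
    if doorkeeper.contains(&kh) {
        base + 1
    } else {
        base
    }
}

fn main() {
    let doorkeeper: HashSet<u64> = [42].into_iter().collect();
    let sketch: HashMap<u64, u64> = [(42, 3)].into_iter().collect();

    assert_eq!(estimate(&doorkeeper, &sketch, 42), 4); // doorkeeper hit: 3 + 1
    assert_eq!(estimate(&doorkeeper, &sketch, 7), 0); // unseen key
}
```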
8 changes: 4 additions & 4 deletions src/lfu/wtinylfu.rs
@@ -456,22 +456,22 @@ impl<K: Hash + Eq, V, KH: KeyHasher<K>, FH: BuildHasher, RH: BuildHasher, WH: Bu
WTinyLFUCacheBuilder::default()
}

///
/// Returns the window cache len
pub fn window_cache_len(&self) -> usize {
self.lru.len()
}

///
/// Returns the window cache cap
pub fn window_cache_cap(&self) -> usize {
self.lru.cap()
}

///
/// Returns the main cache len
pub fn main_cache_len(&self) -> usize {
self.slru.len()
}

///
/// Returns the main cache cap
pub fn main_cache_cap(&self) -> usize {
self.slru.cap()
}
18 changes: 11 additions & 7 deletions src/lib.rs
@@ -53,11 +53,11 @@
//!
//! ## Acknowledgments
//! - The implementation of `RawLRU` is highly inspired by
//! [Jerome Froelich's LRU implementation](https://github.com/jeromefroe/lru-rs)
//! and [`std::collections`] library of Rust.
//! [Jerome Froelich's LRU implementation](https://github.com/jeromefroe/lru-rs)
//! and [`std::collections`] library of Rust.
//!
//! - Thanks for [HashiCorp's golang-lru](https://github.com/hashicorp/golang-lru)
//! providing the amazing Go implementation.
//! providing the amazing Go implementation.
//!
//! - Ramakrishna's paper: [Caching strategies to improve disk system performance]
//!
@@ -90,7 +90,11 @@
#![cfg_attr(docsrs, allow(unused_attributes))]
#![deny(missing_docs)]
#![allow(unused_doc_comments)]
#![allow(clippy::blocks_in_conditions, clippy::enum_variant_names)]
#![allow(
clippy::blocks_in_conditions,
clippy::enum_variant_names,
clippy::missing_transmute_annotations
)]

extern crate alloc;

@@ -212,13 +216,13 @@ pub trait OnEvictCallback {
/// `PutResult` is returned when try to put a entry in cache.
///
/// - **`PutResult::Put`** means that the key is not in cache previously, and the cache has enough
/// capacity, no evict happens.
/// capacity, no evict happens.
///
/// - **`PutResult::Update`** means that the key already exists in the cache,
/// and this operation updates the key's value and the inner is the old value.
/// and this operation updates the key's value and the inner is the old value.
///
/// - **`PutResult::Evicted`** means that the the key is not in cache previously,
/// but the cache is full, so the evict happens. The inner is the evicted entry `(Key, Value)`.
/// but the cache is full, so the evict happens. The inner is the evicted entry `(Key, Value)`.
///
/// - **`PutResult::EvictedAndUpdate`** is only possible to be returned by [`TwoQueueCache`] and [`AdaptiveCache`]. For more information, please see the related examples of [`TwoQueueCache`] and [`AdaptiveCache`]
///
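The `PutResult` contract above can be exercised directly. The sketch below follows the doc comment; the exact variant shapes (`Update(old)`, `Evicted { key, value }`) and the `LRUCache`/`Cache` re-exports are assumptions rather than confirmed 0.3 signatures.

```rust
use caches::{Cache, LRUCache, PutResult};

fn main() {
    let mut cache = LRUCache::new(1).unwrap();

    // Key absent and capacity available: plain insert.
    assert_eq!(cache.put(1, "a"), PutResult::Put);

    // Same key again: the previous value is handed back.
    assert_eq!(cache.put(1, "b"), PutResult::Update("a"));

    // New key while full: the evicted entry is returned.
    match cache.put(2, "c") {
        PutResult::Evicted { key, value } => println!("evicted ({key}, {value})"),
        other => println!("unexpected: {other:?}"),
    }
}
```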
16 changes: 8 additions & 8 deletions src/lru.rs
@@ -13,14 +13,14 @@
//! - [`SegmentedCache`] is a fixed size Segmented LRU cache.
//!
//! - [`AdaptiveCache`] is a fixed size Adaptive Replacement Cache (ARC).
//! ARC is an enhancement over the standard LRU cache in that tracks both
//! frequency and recency of use. This avoids a burst in access to new
//! entries from evicting the frequently used older entries.
//! ARC is an enhancement over the standard LRU cache in that tracks both
//! frequency and recency of use. This avoids a burst in access to new
//! entries from evicting the frequently used older entries.
//!
//!
//! - [`TwoQueueCache`] is a fixed size 2Q cache. 2Q is an enhancement
//! over the standard LRU cache in that it tracks both frequently
//! and recently used entries separately.
//! over the standard LRU cache in that it tracks both frequently
//! and recently used entries separately.
//!
//! ## Trade-Off
//! In theory, [`AdaptiveCache`] and [`TwoQueueCache`] add some additional
@@ -86,11 +86,11 @@
//!
//! ## Acknowledgments
//! - The implementation of `RawLRU` is highly inspired by
//! [Jerome Froelich's LRU implementation](https://github.com/jeromefroe/lru-rs)
//! and [`std::collections`] library of Rust.
//! [Jerome Froelich's LRU implementation](https://github.com/jeromefroe/lru-rs)
//! and [`std::collections`] library of Rust.
//!
//! - Thanks for [HashiCorp's golang-lru](https://github.com/hashicorp/golang-lru)
//! providing the amazing Go implementation.
//! providing the amazing Go implementation.
//!
//! - Ramakrishna's paper: [Caching strategies to improve disk system performance]
//!
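As a concrete companion to the trade-off discussion in the module docs above, the sketch below constructs an ARC and shows the behaviour the docs claim: a frequently used entry is not displaced by a burst of one-shot insertions. The `caches::lru::AdaptiveCache` path, the fallible `new`, and the `contains` accessor are assumptions.

```rust
use caches::lru::AdaptiveCache;
use caches::Cache;

fn main() {
    // ARC with room for four live entries.
    let mut cache = AdaptiveCache::new(4).unwrap();

    // One entry is used repeatedly, so ARC should promote it to its
    // frequency-tracked list.
    cache.put(1u64, "hot");
    for _ in 0..8 {
        let _ = cache.get(&1);
    }

    // A burst of new, never-reused keys fills the recency side.
    for i in 100..110u64 {
        cache.put(i, "burst");
    }

    // Because ARC tracks frequency as well as recency, the hot entry is
    // expected to survive the burst.
    println!("hot key still cached: {}", cache.contains(&1));
}
```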
3 changes: 1 addition & 2 deletions src/lru/raw.rs
@@ -33,7 +33,6 @@ use core::iter::{FromIterator, FusedIterator};
use core::marker::PhantomData;
use core::mem;
use core::ptr::{self, NonNull};
use core::usize;

use crate::cache_api::ResizableCache;
use crate::lru::CacheError;
@@ -2141,7 +2140,7 @@ mod tests {
#[test]
#[cfg(feature = "hashbrown")]
fn test_with_hasher() {
use hashbrown::hash_map::DefaultHashBuilder;
use hashbrown::DefaultHashBuilder;

let s = DefaultHashBuilder::default();
let mut cache = RawLRU::with_hasher(16, s).unwrap();
2 changes: 1 addition & 1 deletion src/lru/two_queue.rs
@@ -390,7 +390,7 @@ impl<K: Hash + Eq, V, RH: BuildHasher, FH: BuildHasher, GH: BuildHasher> Cache<K
///
/// # Note
/// - [`TwoQueueCache`] guarantees that the size of the recent LRU plus the size of the freq LRU
/// is less or equal to the [`TwoQueueCache`]'s size.
/// is less or equal to the [`TwoQueueCache`]'s size.
/// - The ghost LRU has its own size.
///
/// # Example
