Loading page…
Rust walkthroughs
Loading page…
LRU (Least Recently Used) Cache is a caching strategy that automatically evicts the least recently accessed items when the cache reaches capacity. The lru crate provides an efficient O(1) implementation using a hash map combined with a doubly-linked list.
Key concepts:
When to use LRU Cache:
When NOT to use LRU Cache:
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    // Build an LRU cache that holds at most 3 entries.
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());
    // Fill the cache to capacity.
    for (key, value) in [("a", 1), ("b", 2), ("c", 3)] {
        cache.put(key, value);
    }
    println!("Cache: {:?}", cache);
    // A 4th insert exceeds capacity, so the least recently used entry ("a") is dropped.
    cache.put("d", 4);
    println!("After adding 'd': {:?}", cache);
}
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());
    for (key, value) in [("a", 1), ("b", 2), ("c", 3)] {
        cache.put(key, value);
    }
    // Reading "a" marks it as the most recently used entry.
    let value = cache.get(&"a");
    println!("Got 'a': {:?}", value);
    // With the cache full, inserting "d" now drops "b" — the oldest untouched key.
    cache.put("d", 4);
    println!("'a' still exists: {:?}", cache.get(&"a"));
    println!("'b' was evicted: {:?}", cache.get(&"b"));
}
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    let capacity = NonZeroUsize::new(3).unwrap();
    let mut cache: LruCache<&str, Vec<i32>> = LruCache::new(capacity);
    cache.put("nums", vec![1, 2, 3]);
    // get_mut returns a mutable borrow (and promotes the entry), so we can edit in place.
    if let Some(stored) = cache.get_mut(&"nums") {
        stored.push(4);
    }
    println!("Updated: {:?}", cache.get(&"nums"));
}
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());
    for (key, value) in [("a", 1), ("b", 2)] {
        cache.put(key, value);
    }
    // contains answers existence without touching the recency order.
    println!("Contains 'a': {}", cache.contains(&"a"));
    // peek reads a value while leaving the LRU order untouched.
    let value = cache.peek(&"a");
    println!("Peeked 'a': {:?}", value);
    // peek_mut allows in-place edits, also without promoting the entry.
    if let Some(slot) = cache.peek_mut(&"a") {
        *slot = 10;
    }
    println!("After peek_mut: {:?}", cache.get(&"a"));
}
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(5).unwrap());
    for (key, value) in [("a", 1), ("b", 2), ("c", 3)] {
        cache.put(key, value);
    }
    // pop removes exactly the requested key and returns its value.
    let removed = cache.pop(&"b");
    println!("Removed 'b': {:?}", removed);
    // pop_lru drops the coldest entry and yields the (key, value) pair.
    let lru = cache.pop_lru();
    println!("Popped LRU: {:?}", lru);
    // pop_mru drops the hottest entry instead.
    let mru = cache.pop_mru();
    println!("Popped MRU: {:?}", mru);
}
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(5).unwrap());
    for (key, value) in [("a", 1), ("b", 2), ("c", 3)] {
        cache.put(key, value);
    }
    // Default iteration order runs from most to least recently used.
    println!("MRU to LRU:");
    for (key, value) in &cache {
        println!(" {} = {}", key, value);
    }
    // Reversing the iterator walks the entries coldest-first.
    println!("LRU to MRU:");
    for (key, value) in cache.iter().rev() {
        println!(" {} = {}", key, value);
    }
}
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());
    // The 4th put already evicts one entry; len stays at capacity.
    for (key, value) in [("a", 1), ("b", 2), ("c", 3), ("d", 4)] {
        cache.put(key, value);
    }
    println!("Len: {}, Capacity: {}", cache.len(), cache.cap().get());
    // Shrinking the capacity below the current length evicts the coldest entries.
    cache.resize(NonZeroUsize::new(2).unwrap());
    println!("After resize: Len: {}, Capacity: {}", cache.len(), cache.cap().get());
}
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(5).unwrap());
    for (key, value) in [("a", 1), ("b", 2), ("c", 3)] {
        cache.put(key, value);
    }
    println!("Len: {}, Is empty: {}", cache.len(), cache.is_empty());
    // clear removes every entry but keeps the configured capacity.
    cache.clear();
    println!("After clear: Len: {}, Is empty: {}", cache.len(), cache.is_empty());
}
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    let capacity = NonZeroUsize::new(2).unwrap();
    let mut cache: LruCache<&str, i32> = LruCache::new(capacity);
    cache.put("a", 1);
    cache.put("b", 2);
    // NOTE: the lru crate has no `try_put` (the original example called one).
    // `push` is the method that surfaces evictions: it returns the (key, value)
    // pair that was evicted to make room, if any.
    let evicted = cache.push("c", 3);
    println!("Evicted: {:?}", evicted); // Some(("a", 1)) - "a" was least recently used
    // `put` returns the PREVIOUS value for the same key (not the evicted entry).
    let old = cache.put("c", 30);
    println!("Old value: {:?}", old); // Some(3) - "c" already existed
}
use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());
    cache.put("a", 1);
    // On a hit, get_or_insert returns the stored value; the closure never runs.
    let value = cache.get_or_insert("a", || 100);
    println!("Got existing: {}", value);
    // On a miss, the closure supplies the value to insert and return.
    let value = cache.get_or_insert("b", || 200);
    println!("Inserted new: {}", value);
}
use lru::LruCache;
use std::num::NonZeroUsize;
use std::hash::Hash;
/// Memoizes a function behind an LRU cache of its results.
/// `F` is the wrapped function type; see the impl block for its bounds.
struct Memoized<F, K, V>
where
K: Eq + Hash + Clone,
{
// Previously computed results, keyed by the function's argument.
cache: LruCache<K, V>,
// The underlying function whose results are cached.
f: F,
}
impl<F, K, V> Memoized<F, K, V>
where
    F: Fn(&K) -> V,
    K: Eq + Hash + Clone,
    V: Clone,
{
    /// Wraps `f` with an LRU cache holding at most `capacity` results.
    ///
    /// Panics if `capacity` is zero.
    fn new(capacity: usize, f: F) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            f,
        }
    }

    /// Returns the result for `key`, computing and caching it on a miss.
    /// A hit also promotes the entry to most recently used.
    fn call(&mut self, key: K) -> V {
        if let Some(hit) = self.cache.get(&key) {
            return hit.clone();
        }
        let computed = (self.f)(&key);
        self.cache.put(key, computed.clone());
        computed
    }
}
/// Squares `n`, logging each invocation so cache hits are visible in the output.
fn expensive_computation(n: &i32) -> i32 {
    println!("Computing for {}...", n);
    *n * *n
}
fn main() {
    // Memoize the squaring function with room for 3 cached results.
    let mut memo = Memoized::new(3, expensive_computation);
    println!("Result: {}", memo.call(5)); // Computes
    println!("Result: {}", memo.call(5)); // Cached
    println!("Result: {}", memo.call(3)); // Computes
}
use lru::LruCache;
use std::num::NonZeroUsize;
/// A minimal HTTP response: just the pieces this example caches.
#[derive(Clone)]
struct HttpResponse {
/// HTTP status code (e.g. 200).
status: u16,
/// Response body as text.
body: String,
}
/// Caches HTTP responses keyed by URL, evicting the least recently used.
struct HttpCache {
// LRU store: URL -> cached response.
cache: LruCache<String, HttpResponse>,
}
impl HttpCache {
    /// Creates a cache holding at most `capacity` responses.
    ///
    /// Panics if `capacity` is zero.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
        }
    }

    /// Looks up the cached response for `url`, promoting the entry on a hit.
    /// The response is cloned so the caller owns it outright.
    fn get(&mut self, url: &str) -> Option<HttpResponse> {
        self.cache.get(url).cloned()
    }

    /// Stores `response` under `url`, evicting the coldest entry when full.
    fn store(&mut self, url: String, response: HttpResponse) {
        self.cache.put(url, response);
    }

    /// Reports (current number of entries, maximum capacity).
    fn stats(&self) -> (usize, usize) {
        (self.cache.len(), self.cache.cap().get())
    }
}
fn main() {
    let mut cache = HttpCache::new(100);
    // Cache a response for a URL, then read it back.
    let response = HttpResponse {
        status: 200,
        body: "Hello".to_string(),
    };
    cache.store("https://example.com".to_string(), response);
    if let Some(response) = cache.get("https://example.com") {
        println!("Status: {}", response.status);
    }
}
use lru::LruCache;
use std::num::NonZeroUsize;
/// Caches database query results keyed by the SQL text.
struct QueryCache {
// Maps a query string to its (id, name) result rows.
cache: LruCache<String, Vec<(i32, String)>>,
}
impl QueryCache {
    /// Creates a result cache with room for `capacity` distinct queries.
    ///
    /// Panics if `capacity` is zero.
    fn new(capacity: usize) -> Self {
        let cap = NonZeroUsize::new(capacity).unwrap();
        Self { cache: LruCache::new(cap) }
    }

    /// Returns the cached rows for `query` (promoting the entry), if present.
    fn query(&mut self, query: &str) -> Option<&Vec<(i32, String)>> {
        self.cache.get(query)
    }

    /// Caches `results` under `query`, evicting the coldest query when full.
    fn store(&mut self, query: String, results: Vec<(i32, String)>) {
        self.cache.put(query, results);
    }
}
fn main() {
    let mut cache = QueryCache::new(50);
    // Pretend these rows came back from the database.
    let rows = vec![(1, "Alice".to_string()), (2, "Bob".to_string())];
    cache.store("SELECT * FROM users".to_string(), rows);
    if let Some(results) = cache.query("SELECT * FROM users") {
        for (id, name) in results {
            println!("{}: {}", id, name);
        }
    }
}
use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
/// An LRU cache that can be shared between threads.
/// All access goes through the Mutex, so `&self` methods can still mutate.
struct ThreadSafeCache<K, V> {
cache: Mutex<LruCache<K, V>>,
}
impl<K, V> ThreadSafeCache<K, V>
where
    K: std::hash::Hash + Eq,
{
    /// Creates a cache with room for `capacity` entries.
    ///
    /// Panics if `capacity` is zero.
    fn new(capacity: usize) -> Self {
        let cap = NonZeroUsize::new(capacity).unwrap();
        Self {
            cache: Mutex::new(LruCache::new(cap)),
        }
    }

    /// Clones the value for `key` out of the cache, promoting the entry.
    /// Takes `&self`: the Mutex supplies the interior mutability.
    fn get(&self, key: &K) -> Option<V>
    where
        V: Clone,
    {
        let mut guard = self.cache.lock().unwrap();
        guard.get(key).cloned()
    }

    /// Inserts `value` under `key`, evicting the coldest entry when full.
    fn put(&self, key: K, value: V) {
        let mut guard = self.cache.lock().unwrap();
        guard.put(key, value);
    }
}
fn main() {
    // Arc lets the cache be shared across threads; here we just exercise the API.
    let cache = Arc::new(ThreadSafeCache::<&str, i32>::new(10));
    for (key, value) in [("a", 1), ("b", 2)] {
        cache.put(key, value);
    }
    if let Some(val) = cache.get(&"a") {
        println!("Got: {}", val);
    }
}
use lru::LruCache;
use std::num::NonZeroUsize;
/// LRU cache that also tracks hit/miss statistics.
struct StatsCache<K, V> {
cache: LruCache<K, V>,
// Number of lookups that found their key.
hits: u64,
// Number of lookups that missed.
misses: u64,
}
impl<K, V> StatsCache<K, V>
where
    K: std::hash::Hash + Eq,
{
    /// Creates a counting cache with room for `capacity` entries.
    ///
    /// Panics if `capacity` is zero.
    fn new(capacity: usize) -> Self {
        let cap = NonZeroUsize::new(capacity).unwrap();
        Self {
            cache: LruCache::new(cap),
            hits: 0,
            misses: 0,
        }
    }

    /// Looks up `key`, counting the access as a hit or a miss.
    ///
    /// Fixed: the original probed the cache twice (`contains`, then `get`);
    /// a single `get` both answers the lookup and promotes the entry.
    fn get(&mut self, key: &K) -> Option<&V> {
        // `result` borrows only the `cache` field, so the counter fields
        // can still be updated (disjoint field borrows).
        let result = self.cache.get(key);
        if result.is_some() {
            self.hits += 1;
        } else {
            self.misses += 1;
        }
        result
    }

    /// Inserts `value` under `key`; returns the previous value for that key, if any.
    fn put(&mut self, key: K, value: V) -> Option<V> {
        self.cache.put(key, value)
    }

    /// Returns (hits, misses, hit rate). The rate is 0.0 before any lookup.
    fn stats(&self) -> (u64, u64, f64) {
        let total = self.hits + self.misses;
        let hit_rate = if total > 0 {
            self.hits as f64 / total as f64
        } else {
            0.0
        };
        (self.hits, self.misses, hit_rate)
    }
}
fn main() {
    let mut cache = StatsCache::<&str, i32>::new(3);
    cache.put("a", 1);
    cache.put("b", 2);
    // Hit, miss, hit — in that order.
    for key in ["a", "c", "a"] {
        cache.get(&key);
    }
    let (hits, misses, rate) = cache.stats();
    println!("Hits: {}, Misses: {}, Hit rate: {:.2}%", hits, misses, rate * 100.0);
}
use lru::LruCache;
use std::num::NonZeroUsize;
/// Two-tier cache: a small hot tier in front of a larger cold tier.
struct TwoLevelCache<K, V> {
hot: LruCache<K, V>, // Small, fast cache
cold: LruCache<K, V>, // Larger, slower cache
}
impl<K, V> TwoLevelCache<K, V>
where
    K: std::hash::Hash + Eq + Clone,
    V: Clone,
{
    /// Builds the two tiers with the given capacities.
    ///
    /// Panics if either capacity is zero.
    fn new(hot_size: usize, cold_size: usize) -> Self {
        let hot_cap = NonZeroUsize::new(hot_size).unwrap();
        let cold_cap = NonZeroUsize::new(cold_size).unwrap();
        Self {
            hot: LruCache::new(hot_cap),
            cold: LruCache::new(cold_cap),
        }
    }

    /// Looks `key` up in the hot tier first, falling back to the cold tier.
    /// A cold-tier hit is promoted into the hot tier for subsequent lookups.
    ///
    /// Fixed: the original returned `Some(value)` with `value: &V` from a
    /// function declared to return `Option<V>`, which does not compile;
    /// the cold-tier hit must be cloned before it is returned.
    fn get(&mut self, key: &K) -> Option<V> {
        if let Some(hit) = self.hot.get(key) {
            return Some(hit.clone());
        }
        // `?` returns None when the cold tier misses too.
        let cold_hit = self.cold.get(key)?.clone();
        // Promote into the hot tier; this may evict the hot tier's coldest entry.
        self.hot.put(key.clone(), cold_hit.clone());
        Some(cold_hit)
    }

    /// Records `value` in both tiers; either tier may evict its coldest entry.
    fn put(&mut self, key: K, value: V) {
        self.cold.put(key.clone(), value.clone());
        self.hot.put(key, value);
    }
}
fn main() {
    // Hot tier holds 2 entries; cold tier holds 5.
    let mut cache = TwoLevelCache::<&str, i32>::new(2, 5);
    for (key, value) in [("a", 1), ("b", 2)] {
        cache.put(key, value);
    }
    if let Some(val) = cache.get(&"a") {
        println!("Got: {}", val);
    }
}
LruCache Type Signature:
use lru::LruCache;
use std::num::NonZeroUsize;
let capacity = NonZeroUsize::new(100).unwrap();
let cache: LruCache<K, V> = LruCache::new(capacity);

Key Methods:
| Method | Description |
|--------|-------------|
| put(k, v) | Insert item; returns the previous value if the key already existed (use push(k, v) to get the evicted entry) |
| get(&k) | Get item, promotes to MRU |
| get_mut(&k) | Get mutable ref, promotes |
| peek(&k) | Get without promoting |
| peek_mut(&k) | Get mutable without promoting |
| contains(&k) | Check existence without promoting |
| pop(&k) | Remove specific key |
| pop_lru() | Remove least recently used |
| pop_mru() | Remove most recently used |
| len() | Number of items |
| cap() | Maximum capacity |
| clear() | Remove all items |
| resize(new_cap) | Change capacity |
Cache Operations Time Complexity:
| Operation | Time |
|-----------|------|
| Insert | O(1) |
| Get | O(1) |
| Eviction | O(1) |
| Lookup | O(1) |
LRU vs Other Eviction Policies:
| Policy | Evicts | Best For |
|--------|--------|----------|
| LRU | Least recently used | Temporal locality |
| LFU | Least frequently used | Popularity patterns |
| FIFO | Oldest insertion | Simple use cases |
| Random | Random item | Simplicity |
Key Points:
- Capacity is a NonZeroUsize
- get promotes an item to most recently used
- peek accesses a value without promoting it
- push returns the evicted entry if any; put returns the previous value for an existing key
- resize changes the capacity
- Wrap the cache in a Mutex for thread safety