How do I cache data with an LRU cache in Rust?

Walkthrough

The lru crate provides a fast LRU (Least Recently Used) cache implementation in Rust. An LRU cache automatically evicts the least recently accessed items when it reaches capacity, making it ideal for caching frequently accessed data while bounding memory usage. Note that LruCache itself is not thread-safe; for concurrent access, wrap it in Arc<Mutex<LruCache>> as shown in the thread-safety example below.

Key concepts:

  1. Capacity — maximum number of items the cache can hold
  2. Eviction — automatic removal of least recently used items
  3. Put/Get — insert and retrieve items, updating access order
  4. Peek — retrieve without updating access order
  5. Conditional insertion — get_or_insert and get_or_insert_mut compute a value only on a cache miss (the lru crate has no HashMap-style entry API)
  6. Iteration — iterate over items in access order (most to least recent)

Code Example

# Cargo.toml
[dependencies]
lru = "0.12"
use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    // Create an LRU cache with capacity for 3 items
    let mut cache: LruCache<&str, i32> = 
        LruCache::new(NonZeroUsize::new(3).unwrap());
    
    // Insert items; insertion order sets the initial recency order
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    // Access "a" - moves it to most recent, leaving "b" least recent
    let _ = cache.get(&"a");
    
    // Insert "d" - evicts "b" (least recently used)
    cache.put("d", 4);
    
    println!("Cache contains: {:?}", cache.iter().collect::<Vec<_>>());
    // iter() yields entries most- to least-recently used:
    // Shows: [("d", 4), ("a", 1), ("c", 3)]
    
    assert!(cache.contains(&"a"));
    assert!(!cache.contains(&"b")); // evicted
}

Basic Usage

use lru::LruCache;
use std::num::NonZeroUsize;
 
// Basic LruCache operations: put, get, contains, pop, len and cap.
fn main() {
    // Cache holding at most 5 entries, keyed by owned strings.
    let mut cache: LruCache<String, i32> =
        LruCache::new(NonZeroUsize::new(5).unwrap());

    // Populate a few entries (insertion order = initial recency order).
    for (name, number) in [("one", 1), ("two", 2), ("three", 3)] {
        cache.put(name.to_string(), number);
    }

    // get() retrieves a value and marks the entry most recently used.
    if let Some(value) = cache.get(&"one".to_string()) {
        println!("Got: {}", value);
    }

    // Membership check without promoting the entry.
    println!("Contains 'two': {}", cache.contains(&"two".to_string()));

    // pop() removes an entry and hands back its value.
    if let Some(value) = cache.pop(&"two".to_string()) {
        println!("Removed: {}", value);
    }

    // Current occupancy vs. configured capacity.
    println!("Cache size: {}", cache.len());
    println!("Cache capacity: {}", cache.cap());
}

Capacity and Eviction

use lru::LruCache;
use std::num::NonZeroUsize;
 
// Eviction follows access recency, not insertion order.
fn main() {
    let mut cache: LruCache<i32, &str> =
        LruCache::new(NonZeroUsize::new(3).unwrap());

    // Snapshot helper: copies (key, value) pairs out, most recent first.
    let snapshot =
        |c: &LruCache<i32, &str>| c.iter().map(|(k, v)| (*k, *v)).collect::<Vec<_>>();

    // Fill the cache to capacity.
    cache.put(1, "one");
    cache.put(2, "two");
    cache.put(3, "three");
    println!("After filling: {:?}", snapshot(&cache));

    // A fourth insert evicts the least recently used entry (key 1).
    cache.put(4, "four");
    println!("After adding 4: {:?}", snapshot(&cache));
    // Order: most recent first

    // Touch key 2 so it becomes most recently used.
    let _ = cache.get(&2);
    println!("After accessing 2: {:?}", snapshot(&cache));

    // Next insert now evicts key 3, which has become least recent.
    cache.put(5, "five");
    println!("After adding 5: {:?}", snapshot(&cache));

    // Keys 2, 4, 5 survive; 1 and 3 were evicted along the way.
    assert!(cache.contains(&2));
    assert!(cache.contains(&4));
    assert!(cache.contains(&5));
    assert!(!cache.contains(&1)); // evicted
    assert!(!cache.contains(&3)); // evicted
}

Put with Return Value

use lru::LruCache;
use std::num::NonZeroUsize;
 
// `put` returns the previous value stored under the SAME key, if any.
// Eviction caused by exceeding capacity is SILENT with `put`; use
// `push` when you need the evicted (key, value) pair back.
// (The original example claimed put returns the evicted entry — it
// cannot: its return type is Option<V>, not Option<(K, V)>.)
fn main() {
    let mut cache: LruCache<&str, i32> = 
        LruCache::new(NonZeroUsize::new(3).unwrap());
    
    // put returns None for new keys
    let old = cache.put("a", 1);
    println!("Put 'a': {:?}", old); // None
    
    // put returns the old value for existing keys
    let old = cache.put("a", 10);
    println!("Put 'a' again: {:?}", old); // Some(1)
    
    cache.put("b", 2);
    cache.put("c", 3);
    
    // "d" is a new key, so put returns None even though inserting it
    // forces the least-recently-used entry ("a") out of the full cache.
    let old = cache.put("d", 4);
    println!("Put 'd': {:?}", old); // None - the eviction of "a" is silent
    assert!(!cache.contains(&"a")); // "a" was least recently used
    
    // push() DOES report the displaced entry: Some(("b", 2)) here
    let evicted = cache.push("e", 5);
    println!("Evicted when pushing 'e': {:?}", evicted);
}

Get vs Peek

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = 
        LruCache::new(NonZeroUsize::new(3).unwrap());
    
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    // Recency order: a (LRU) -> b -> c (MRU).
    // Note: iter() yields most recent FIRST, so this prints ["c", "b", "a"].
    println!("Initial order: {:?}", 
             cache.iter().map(|(k, _)| *k).collect::<Vec<_>>());
    
    // get() updates access order
    let _ = cache.get(&"a");
    println!("After get('a'): {:?}", 
             cache.iter().map(|(k, _)| *k).collect::<Vec<_>>());
    // Now: b (LRU) -> c -> a (MRU), printed as ["a", "c", "b"]
    
    // peek() does NOT update access order
    let _ = cache.peek(&"b");
    println!("After peek('b'): {:?}", 
             cache.iter().map(|(k, _)| *k).collect::<Vec<_>>());
    // Order unchanged: b (LRU) -> c -> a (MRU)
    
    // peek_mut() allows in-place mutation and also leaves the order alone
    if let Some(value) = cache.peek_mut(&"b") {
        *value *= 10;
    }
    println!("After peek_mut('b'): {:?}", 
             cache.iter().map(|(k, v)| (*k, *v)).collect::<Vec<_>>());
}

Conditional Insertion (get_or_insert)

use lru::LruCache;
use std::num::NonZeroUsize;
 
// The lru crate has no HashMap-style entry() API. Its conditional
// insertion tools are get_or_insert / get_or_insert_mut, whose closures
// run only on a cache miss. (The original example called a nonexistent
// cache.entry() method and would not compile.)
fn main() {
    let mut cache: LruCache<&str, i32> =
        LruCache::new(NonZeroUsize::new(3).unwrap());

    // get_or_insert() - insert if missing, return a reference to the value
    cache.get_or_insert("a", || 1);
    cache.get_or_insert("a", || 100); // Already exists, closure not evaluated
    println!("a = {:?}", cache.get(&"a"));

    // Lazy insertion: the closure only runs when the key is absent
    cache.get_or_insert("b", || {
        println!("Computing value for 'b'");
        2
    });
    cache.get_or_insert("b", || {
        println!("This won't print");
        200
    });

    // Insert the type's default value on a miss
    cache.get_or_insert("c", i32::default);
    println!("c = {:?}", cache.get(&"c"));

    // Modify an existing value in place via get_mut()
    if let Some(v) = cache.get_mut(&"a") {
        *v += 1;
    }
    println!("a after modify = {:?}", cache.get(&"a"));

    // get_or_insert_mut() gives mutable access to the (possibly new) value
    *cache.get_or_insert_mut("d", || 3) += 1;
    println!("d = {:?}", cache.get(&"d"));
}

Iteration

use lru::LruCache;
use std::num::NonZeroUsize;
 
// Iteration order is most- to least-recently used; use .rev() for the
// opposite. Values are owned Strings here so iter_mut() can replace
// them in place (the original used &str values, and assigning the
// String from to_uppercase() to a &str slot does not compile).
fn main() {
    let mut cache: LruCache<i32, String> =
        LruCache::new(NonZeroUsize::new(5).unwrap());

    cache.put(1, "one".to_string());
    cache.put(2, "two".to_string());
    cache.put(3, "three".to_string());
    cache.put(4, "four".to_string());
    cache.put(5, "five".to_string());

    // Iterate from most to least recently used
    println!("MRU to LRU:");
    for (key, value) in cache.iter() {
        println!("  {} -> {}", key, value);
    }

    // Iterate from least to most recently used
    println!("\nLRU to MRU:");
    for (key, value) in cache.iter().rev() {
        println!("  {} -> {}", key, value);
    }

    // Mutable iteration: rewrite every value in place
    println!("\nAfter modification:");
    for (_, value) in cache.iter_mut() {
        *value = value.to_uppercase();
    }
    for (key, value) in cache.iter() {
        println!("  {} -> {}", key, value);
    }

    // Keys only (LruCache has no keys()/values() helpers; project with map)
    println!("\nKeys only:");
    for key in cache.iter().map(|(k, _)| k) {
        println!("  {}", key);
    }

    // Values only
    println!("\nValues only:");
    for value in cache.iter().map(|(_, v)| v) {
        println!("  {}", value);
    }
}

Resizing Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
// Growing and shrinking a live cache with resize().
fn main() {
    let mut cache: LruCache<i32, &str> =
        LruCache::new(NonZeroUsize::new(5).unwrap());

    let items = [(1, "one"), (2, "two"), (3, "three"), (4, "four"), (5, "five")];
    for (key, value) in items {
        cache.put(key, value);
    }

    println!("Size: {}, Capacity: {}", cache.len(), cache.cap());

    // Shrinking evicts from the LRU end until the new capacity fits.
    cache.resize(NonZeroUsize::new(3).unwrap());
    println!("\nAfter resize to 3:");
    println!("Size: {}, Capacity: {}", cache.len(), cache.cap());
    println!("Remaining: {:?}", 
             cache.iter().map(|(k, _)| *k).collect::<Vec<_>>());

    // Growing never drops entries; it only makes room for more.
    cache.resize(NonZeroUsize::new(10).unwrap());
    println!("\nAfter resize to 10:");
    println!("Size: {}, Capacity: {}", cache.len(), cache.cap());

    // There is room for new entries again.
    cache.put(6, "six");
    cache.put(7, "seven");
    println!("After adding more: {:?}", 
             cache.iter().map(|(k, _)| *k).collect::<Vec<_>>());
}

Clearing Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
// clear() drops every entry but keeps the configured capacity.
fn main() {
    let mut cache: LruCache<&str, i32> =
        LruCache::new(NonZeroUsize::new(5).unwrap());

    for (key, value) in [("a", 1), ("b", 2), ("c", 3)] {
        cache.put(key, value);
    }

    println!("Before clear: {} items", cache.len());

    // Remove everything at once.
    cache.clear();

    println!("After clear: {} items", cache.len());
    assert!(cache.is_empty());

    // Capacity is a property of the cache, not of its contents.
    println!("Capacity: {}", cache.cap());
}

Push Operation

use lru::LruCache;
use std::num::NonZeroUsize;
 
// push() is like put() but reports displaced entries as (key, value)
// pairs: either the LRU entry evicted to make room, or the old entry
// for the same key. (The original claimed re-pushing an existing key
// returns None; it actually returns the replaced old entry.)
fn main() {
    let mut cache: LruCache<&str, i32> = 
        LruCache::new(NonZeroUsize::new(3).unwrap());
    
    // push() returns None while there is spare capacity and the key is new
    let evicted = cache.push("a", 1);
    println!("Push 'a': evicted {:?}", evicted); // None
    
    cache.push("b", 2);
    cache.push("c", 3);
    
    // Cache is full: pushing a new key evicts the LRU entry
    let evicted = cache.push("d", 4);
    println!("Push 'd': evicted {:?}", evicted);
    // Some(("a", 1))
    
    // If the key already exists, push replaces the value and returns the
    // OLD entry for that key - no other entry is evicted
    let replaced = cache.push("b", 20);
    println!("Push 'b' again: replaced {:?}", replaced);
    // Some(("b", 2))
    
    println!("Cache contents: {:?}", 
             cache.iter().map(|(k, v)| (*k, *v)).collect::<Vec<_>>());
}

Contains and Get Mut

use lru::LruCache;
use std::num::NonZeroUsize;
 
// contains() checks membership without touching recency;
// get_mut() returns a mutable reference and promotes the entry.
fn main() {
    let mut cache: LruCache<&str, i32> =
        LruCache::new(NonZeroUsize::new(3).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);

    // Membership tests leave the access order untouched.
    println!("Contains 'a': {}", cache.contains(&"a"));
    println!("Contains 'c': {}", cache.contains(&"c"));

    // Mutable access also marks "a" most recently used.
    if let Some(value) = cache.get_mut(&"a") {
        *value += 10;
        println!("Modified 'a' to {}", value);
    }

    // Confirm the in-place update stuck.
    println!("'a' is now: {:?}", cache.get(&"a"));
}

Thread-Safe LRU Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
use std::thread;
 
// LruCache is not thread-safe on its own; Arc<Mutex<...>> serializes
// access so multiple threads can share one cache.
fn main() {
    let cache = Arc::new(Mutex::new(
        LruCache::new(NonZeroUsize::new(10).unwrap())
    ));

    // Spawn five writers, each inserting one entry.
    let handles: Vec<_> = (0..5)
        .map(|i| {
            let shared = Arc::clone(&cache);
            thread::spawn(move || {
                // Hold the lock only for the insertion.
                let mut guard = shared.lock().unwrap();
                guard.put(format!("key-{}", i), i * 10);
                println!("Thread {} inserted key-{}", i, i);
            })
        })
        .collect();

    for handle in handles {
        handle.join().unwrap();
    }

    // Inspect the final state from the main thread.
    let cache = cache.lock().unwrap();
    println!("\nCache contents:");
    for (key, value) in cache.iter() {
        println!("  {} -> {}", key, value);
    }
    println!("Size: {}", cache.len());
}

Caching Function Results

use lru::LruCache;
use std::num::NonZeroUsize;
 
// Expensive computation
/// Naive doubly-recursive Fibonacci, standing in for an expensive
/// computation (exponential number of calls). Returns `n` unchanged
/// for `n <= 1`, matching the usual fib base cases.
fn expensive_computation(n: i32) -> i32 {
    println!("Computing fib({})...", n);
    if n <= 1 {
        return n;
    }
    expensive_computation(n - 1) + expensive_computation(n - 2)
}
 
/// Wraps `expensive_computation` with an LRU cache and hit/miss counters.
struct CachedCalculator {
    cache: LruCache<i32, i32>, // memoized n -> fib(n) results
    hits: u64, // lookups answered from the cache
    misses: u64, // lookups that had to compute
}
 
impl CachedCalculator {
    /// Creates a calculator whose cache retains `capacity` results.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            hits: 0,
            misses: 0,
        }
    }

    /// Returns fib(n), consulting the cache before computing.
    fn compute(&mut self, n: i32) -> i32 {
        // Fast path: answer straight from the cache.
        if let Some(&cached) = self.cache.get(&n) {
            self.hits += 1;
            println!("Cache hit for fib({})", n);
            return cached;
        }

        // Slow path: compute, then remember the result.
        self.misses += 1;
        let computed = expensive_computation(n);
        self.cache.put(n, computed);
        computed
    }

    /// Returns the (hits, misses) counters accumulated so far.
    fn stats(&self) -> (u64, u64) {
        (self.hits, self.misses)
    }
}
 
// Demonstrates hit/miss accounting on the cached calculator.
fn main() {
    let mut calc = CachedCalculator::new(10);

    // First call computes and caches.
    let r1 = calc.compute(10);
    println!("Result: {}", r1);

    // Repeating the same argument is answered from the cache.
    let r2 = calc.compute(10);
    println!("Result: {}", r2);

    // A new argument misses and computes again.
    let r3 = calc.compute(15);
    println!("Result: {}", r3);

    let (hits, misses) = calc.stats();
    println!("\nCache stats: {} hits, {} misses", hits, misses);
}

Web Request Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::collections::HashMap;
 
/// A URL -> body cache with request/hit accounting.
struct WebCache {
    cache: LruCache<String, String>, // url -> cached response body
    requests: u64, // total lookups via get()
    cache_hits: u64, // lookups served from the cache
}
 
impl WebCache {
    /// Creates a cache that retains at most `capacity` responses.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            requests: 0,
            cache_hits: 0,
        }
    }
    
    /// Looks up a cached response, counting the request and (on success)
    /// the hit. Uses a single cache lookup instead of the original
    /// contains() + get() pair, which hashed the key twice.
    fn get(&mut self, url: &str) -> Option<&String> {
        self.requests += 1;
        let found = self.cache.get(&url.to_string());
        if found.is_some() {
            self.cache_hits += 1;
        }
        found
    }
    
    /// Stores (or refreshes) the body cached for `url`.
    fn put(&mut self, url: String, content: String) {
        self.cache.put(url, content);
    }
    
    /// Returns (requests, hits, hit-rate %); the rate is 0.0 before any request.
    fn stats(&self) -> (u64, u64, f64) {
        let hit_rate = if self.requests > 0 {
            self.cache_hits as f64 / self.requests as f64 * 100.0
        } else {
            0.0
        };
        (self.requests, self.cache_hits, hit_rate)
    }
}
 
// Drives WebCache through two hits, an eviction, and a miss.
fn main() {
    let mut cache = WebCache::new(3);

    // Seed the cache with three pages (fills it to capacity).
    let pages = [
        ("https://example.com", "Example content"),
        ("https://rust-lang.org", "Rust homepage"),
        ("https://docs.rs", "Docs.rs"),
    ];
    for (url, body) in pages {
        cache.put(url.to_string(), body.to_string());
    }

    // Cache hit
    if let Some(content) = cache.get("https://example.com") {
        println!("Got: {}", content);
    }

    // Cache hit
    if let Some(content) = cache.get("https://rust-lang.org") {
        println!("Got: {}", content);
    }

    // A fourth page evicts the least recently used entry (docs.rs).
    cache.put("https://crates.io".to_string(), "Crates.io".to_string());

    // Cache miss (was evicted)
    if cache.get("https://docs.rs").is_none() {
        println!("docs.rs was evicted");
    }

    let (requests, hits, rate) = cache.stats();
    println!("\nStats: {} requests, {} hits, {:.1}% hit rate", 
             requests, hits, rate);
}

Database Query Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// Caches query-result rows keyed by the exact SQL text.
struct QueryCache {
    cache: LruCache<String, Vec<String>>, // sql -> result rows
}
 
impl QueryCache {
    /// Creates a cache retaining the `capacity` most recent query results.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
        }
    }

    /// Returns the rows for `sql`, executing it only on a cache miss.
    /// Results are cloned out so the cache keeps its own copy.
    fn query(&mut self, sql: &str) -> Vec<String> {
        if let Some(rows) = self.cache.get(&sql.to_string()) {
            println!("Cache hit for: {}", sql);
            return rows.clone();
        }

        println!("Executing query: {}", sql);
        let rows = self.execute_query(sql);
        self.cache.put(sql.to_string(), rows.clone());
        rows
    }

    /// Stand-in for a real database round trip.
    fn execute_query(&self, sql: &str) -> Vec<String> {
        let rows: &[&str] = if sql.contains("users") {
            &["alice", "bob", "charlie"]
        } else if sql.contains("products") {
            &["widget", "gadget"]
        } else {
            &[]
        };
        rows.iter().map(|s| s.to_string()).collect()
    }
}
 
// Issues the same SQL twice to show the second call is served from cache.
fn main() {
    let mut cache = QueryCache::new(5);

    // First query - executes against the (simulated) database.
    let users = cache.query("SELECT * FROM users");
    println!("Users: {:?}", users);

    // Identical SQL - served from the cache.
    let users_again = cache.query("SELECT * FROM users");
    println!("Users again: {:?}", users_again);

    // Different SQL - executes again.
    let products = cache.query("SELECT * FROM products");
    println!("Products: {:?}", products);
}

LRU Cache with Expiration

use lru::LruCache;
use std::num::NonZeroUsize;
use std::time::{Duration, Instant};
 
/// A cached value paired with its absolute expiry deadline.
struct TimedEntry<T> {
    value: T, // the cached payload
    expires_at: Instant, // entry is stale once `Instant::now()` passes this
}
 
/// An LRU cache whose entries also expire after a fixed TTL.
struct TimedLruCache<K, V> {
    cache: LruCache<K, TimedEntry<V>>, // recency-managed storage
    default_ttl: Duration, // lifetime applied to every put()
}
 
impl<K: std::hash::Hash + Eq + Clone, V: Clone> TimedLruCache<K, V> {
    /// Creates a cache holding at most `capacity` entries, each living
    /// for `default_ttl` after insertion.
    fn new(capacity: usize, default_ttl: Duration) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            default_ttl,
        }
    }
    
    /// Inserts `value`, stamping it with `now + default_ttl`.
    /// Re-inserting a key resets its expiry.
    fn put(&mut self, key: K, value: V) {
        let entry = TimedEntry {
            value,
            expires_at: Instant::now() + self.default_ttl,
        };
        self.cache.put(key, entry);
    }
    
    /// Returns a clone of the value if present and not yet expired.
    /// Expired entries are removed eagerly on access.
    fn get(&mut self, key: &K) -> Option<V> {
        if let Some(entry) = self.cache.get(key) {
            if Instant::now() < entry.expires_at {
                return Some(entry.value.clone());
            } else {
                // Expired - remove and return None
                self.cache.pop(key);
            }
        }
        None
    }
    
    /// Sweeps out every expired entry, returning how many were removed.
    /// Expired keys are collected first because the cache cannot be
    /// mutated while it is being iterated.
    fn remove_expired(&mut self) -> usize {
        let now = Instant::now();
        let expired: Vec<K> = self.cache
            .iter()
            .filter(|(_, entry)| entry.expires_at <= now)
            .map(|(k, _)| k.clone())
            .collect();
        
        let count = expired.len();
        for key in expired {
            self.cache.pop(&key);
        }
        count
    }
}
 
// The cache is keyed by `&str` here, so lookups pass `&&str` to the
// custom `get(&K)` method. (The original called `get(&"a".to_string())`,
// passing a `&String` where `&&str` is required — a type error.)
fn main() {
    let mut cache = TimedLruCache::new(10, Duration::from_millis(100));

    cache.put("a", 1);
    cache.put("b", 2);

    // Immediately available
    println!("a: {:?}", cache.get(&"a"));
    println!("b: {:?}", cache.get(&"b"));

    // Wait for expiration
    std::thread::sleep(Duration::from_millis(150));

    // Now expired
    println!("After sleep - a: {:?}", cache.get(&"a"));
    println!("After sleep - b: {:?}", cache.get(&"b"));
}

Custom Key Types

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// Composite cache key: a (region, resource id) pair.
/// The Hash + Eq derives make it usable as an LruCache key.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
struct CacheKey {
    region: String, // e.g. "us-east-1"
    resource_id: i32, // resource identifier within the region
}
 
impl CacheKey {
    fn new(region: &str, id: i32) -> Self {
        Self {
            region: region.to_string(),
            resource_id: id,
        }
    }
}
 
/// The cached payload: a named resource with opaque data.
#[derive(Debug, Clone)]
struct Resource {
    name: String, // human-readable label
    data: String, // resource contents
}
 
// Any Hash + Eq type works as an LruCache key, including custom structs.
fn main() {
    let mut cache: LruCache<CacheKey, Resource> =
        LruCache::new(NonZeroUsize::new(5).unwrap());

    // Store resources under composite keys.
    let entries = [
        ("us-east-1", 100, "Server A"),
        ("eu-west-1", 200, "Server B"),
    ];
    for (region, id, name) in entries {
        cache.put(
            CacheKey::new(region, id),
            Resource { name: name.to_string(), data: "data".to_string() },
        );
    }

    // Look up using an equal (not the identical) key value.
    let key = CacheKey::new("us-east-1", 100);
    if let Some(resource) = cache.get(&key) {
        println!("Found: {:?}", resource);
    }

    // A key that was never inserted reports absent.
    let other_key = CacheKey::new("ap-south-1", 300);
    println!("Contains {:?}: {}", other_key, cache.contains(&other_key));
}

Real-World: HTTP Response Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
use std::thread;
 
/// Minimal HTTP response model used for the caching demo.
#[derive(Debug, Clone)]
struct HttpResponse {
    status: u16, // HTTP status code, e.g. 200
    body: String, // response payload
    headers: Vec<(String, String)>, // (name, value) header pairs
}
 
/// Caches full HTTP responses keyed by URL.
struct HttpCache {
    cache: LruCache<String, HttpResponse>, // url -> last fetched response
}
 
impl HttpCache {
    /// Creates a cache retaining at most `capacity` responses.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
        }
    }

    /// Borrowed lookup; promotes the entry to most recently used.
    fn get(&mut self, url: &str) -> Option<&HttpResponse> {
        self.cache.get(&url.to_string())
    }

    /// Stores a response, returning the previous one for this URL, if any.
    fn put(&mut self, url: String, response: HttpResponse) -> Option<HttpResponse> {
        self.cache.put(url, response)
    }

    /// Returns the cached response for `url`, simulating a network fetch
    /// (and caching the result) on a miss.
    fn get_or_fetch(&mut self, url: &str) -> HttpResponse {
        let key = url.to_string();
        if let Some(cached) = self.cache.get(&key) {
            println!("Cache hit: {}", url);
            return cached.clone();
        }

        println!("Fetching: {}", url);
        // Simulated network fetch.
        let fresh = HttpResponse {
            status: 200,
            body: format!("Content for {}", url),
            headers: vec![
                ("content-type".to_string(), "text/html".to_string()),
            ],
        };

        self.cache.put(key, fresh.clone());
        fresh
    }
}
 
// Replays a URL sequence containing repeats to demonstrate cache hits.
fn main() {
    let cache = Arc::new(Mutex::new(HttpCache::new(3)));

    // Simulated request stream.
    let urls = [
        "https://example.com",
        "https://rust-lang.org",
        "https://example.com", // Should hit cache
        "https://docs.rs",
        "https://rust-lang.org", // Should hit cache
    ];

    for url in urls {
        // The lock guard is a temporary, released at the end of the statement.
        let response = cache.lock().unwrap().get_or_fetch(url);
        println!("  Response status: {}\n", response.status);
    }
}

Real-World: Configuration Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::collections::HashMap;
 
/// Caches per-service configuration maps.
struct ConfigCache {
    cache: LruCache<String, HashMap<String, String>>, // service -> settings
}
 
impl ConfigCache {
    /// Creates a cache for at most `capacity` service configurations.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
        }
    }

    /// Returns the configuration for `service`, loading (and caching)
    /// it on a miss. Callers receive their own clone of the map.
    fn get_config(&mut self, service: &str) -> HashMap<String, String> {
        if let Some(cached) = self.cache.get(&service.to_string()) {
            println!("Config cache hit for: {}", service);
            return cached.clone();
        }

        println!("Loading config for: {}", service);
        let fresh = self.load_config(service);
        self.cache.put(service.to_string(), fresh.clone());
        fresh
    }

    /// Stand-in for reading configuration from disk or a config service.
    fn load_config(&self, service: &str) -> HashMap<String, String> {
        HashMap::from([
            ("timeout".to_string(), "30".to_string()),
            ("retries".to_string(), "3".to_string()),
            ("service".to_string(), service.to_string()),
        ])
    }

    /// Drops the cached entry so the next get_config() reloads it.
    fn invalidate(&mut self, service: &str) {
        self.cache.pop(&service.to_string());
        println!("Invalidated config for: {}", service);
    }
}
 
// Load, re-load (hit), invalidate, and reload a service configuration.
fn main() {
    let mut cache = ConfigCache::new(5);

    // First loads miss and go through the loader.
    println!("API config: {:?}", cache.get_config("api-service"));
    println!("DB config: {:?}", cache.get_config("db-service"));

    // A second fetch of the same service is served from the cache.
    println!("API config again: {:?}", cache.get_config("api-service"));

    // Drop the cached entry...
    cache.invalidate("api-service");

    // ...so the next fetch misses and reloads it.
    println!("API config reloaded: {:?}", cache.get_config("api-service"));
}

Real-World: Memoization

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// Wraps a function with an LRU cache of its results.
/// Trait bounds live on the impl block (where they are actually
/// needed) rather than on the struct definition, per standard Rust
/// practice — the original repeated `Fn`/`Hash`/`Eq` bounds here.
struct Memoizer<F, K, V> {
    cache: LruCache<K, V>, // memoized arg -> result pairs
    func: F,               // the function being memoized
}
 
impl<F, K, V> Memoizer<F, K, V>
where
    F: Fn(K) -> V,
    K: std::hash::Hash + Eq + Clone,
    V: Clone,
{
    /// Wraps `func` with an LRU cache holding `capacity` results.
    fn new(capacity: usize, func: F) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            func,
        }
    }

    /// Returns func(arg), invoking the wrapped function at most once
    /// per argument still resident in the cache.
    fn call(&mut self, arg: K) -> V {
        if let Some(hit) = self.cache.get(&arg) {
            return hit.clone();
        }

        let computed = (self.func)(arg.clone());
        self.cache.put(arg, computed.clone());
        computed
    }
}
 
// Memoizes two different closures: string uppercasing and factorial.
fn main() {
    // Memoize expensive string operations
    let mut uppercaser = Memoizer::new(10, |s: String| {
        println!("Computing uppercase for: {}", s);
        s.to_uppercase()
    });

    // Only the first occurrence of each distinct word is computed.
    for word in ["hello", "hello", "world", "hello"] {
        println!("Result: {}", uppercaser.call(word.to_string()));
    }

    // Memoize factorial
    let mut factorial = Memoizer::new(20, |n: i32| {
        println!("Computing factorial({})", n);
        (1..=n).product::<i32>()
    });

    println!("5! = {}", factorial.call(5));
    println!("5! = {}", factorial.call(5)); // served from cache
    println!("10! = {}", factorial.call(10));
}

Real-World: Session Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
use std::collections::HashMap;
 
/// A logged-in user's session state.
#[derive(Debug, Clone)]
struct Session {
    user_id: String, // owner of the session
    created_at: u64, // creation time, seconds since the Unix epoch
    data: HashMap<String, String>, // arbitrary per-session key/value data
}
 
/// Session store with LRU eviction: the least recently touched
/// sessions are dropped first when capacity is reached.
struct SessionCache {
    cache: LruCache<String, Session>, // session id -> session
}
 
impl SessionCache {
    /// Creates a store holding at most `capacity` live sessions.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
        }
    }

    /// Creates, stores, and returns a fresh session for `user_id`.
    fn create_session(&mut self, session_id: String, user_id: String) -> Session {
        // Timestamp the session with the current Unix time in seconds.
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap()
            .as_secs();
        let session = Session {
            user_id,
            created_at: now,
            data: HashMap::new(),
        };
        self.cache.put(session_id.clone(), session.clone());
        println!("Created session {} for user {}", session_id, session.user_id);
        session
    }

    /// Borrowed lookup; touching a session marks it recently used.
    fn get_session(&mut self, session_id: &str) -> Option<&Session> {
        self.cache.get(&session_id.to_string())
    }

    /// Sets one key/value pair on the session; false if it is unknown.
    fn update_session(&mut self, session_id: &str, key: String, value: String) -> bool {
        match self.cache.get_mut(&session_id.to_string()) {
            Some(session) => {
                session.data.insert(key, value);
                true
            }
            None => false,
        }
    }

    /// Removes and returns the session, if present.
    fn delete_session(&mut self, session_id: &str) -> Option<Session> {
        self.cache.pop(&session_id.to_string())
    }
}
 
// Exercises create / read / update / delete on the session store.
fn main() {
    let mut cache = SessionCache::new(100);

    // Create two sessions.
    cache.create_session("session-1".to_string(), "user-alice".to_string());
    cache.create_session("session-2".to_string(), "user-bob".to_string());

    // Read one back.
    if let Some(session) = cache.get_session("session-1") {
        println!("Session user: {}", session.user_id);
    }

    // Attach some per-session data.
    for (key, value) in [("theme", "dark"), ("language", "en")] {
        cache.update_session("session-1", key.to_string(), value.to_string());
    }

    // Confirm the data stuck.
    if let Some(session) = cache.get_session("session-1") {
        println!("Session data: {:?}", session.data);
    }

    // Delete session
    cache.delete_session("session-1");
    println!("Session deleted");
}

Summary

  • Create with LruCache::new(NonZeroUsize::new(capacity).unwrap())
  • Use put(key, value) to insert; it returns the previous value for that key, if any — eviction on overflow is silent (use push to get the evicted entry back)
  • Use get(&key) to retrieve and update access order
  • Use peek(&key) to retrieve without updating access order
  • Use contains(&key) to check existence without updating order
  • Use pop(&key) to remove and return a value
  • Conditional insertion: get_or_insert(key, || value) and get_or_insert_mut() evaluate the closure only on a cache miss
  • Iterate with iter() (most to least recent) or iter().rev()
  • Resize with resize(new_capacity) - evicts LRU items if shrinking
  • For thread safety, wrap in Arc<Mutex<LruCache>>
  • Perfect for: function result caching, HTTP response caching, database query caching, session management, memoization
  • Automatic eviction of least recently used items when capacity is reached