How do I implement LRU caching in Rust?

Walkthrough

The lru crate provides a fast, well-tested implementation of an LRU (Least Recently Used) cache. An LRU cache evicts the least recently accessed entry when capacity is exceeded, making it ideal for caching scenarios where recent access patterns predict future accesses. The crate uses a combination of a hash map and a doubly-linked list for O(1) insertion, lookup, and eviction. Common use cases include memoization, HTTP response caching, database query caching, and any situation where you want bounded memory usage with intelligent eviction.

Key concepts:

  1. LruCache<K, V> — the main cache type with capacity-based eviction
  2. Capacity — maximum number of entries before eviction occurs
  3. get/promote — accessing entries moves them to "most recent" position
  4. put/push — insert entries, evicts LRU entry if at capacity
  5. get_or_insert / get_or_insert_mut — conditional insertion on a cache miss (the lru crate has no HashMap-style entry API)

Code Example

# Cargo.toml
[dependencies]
lru = "0.12"
use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());
    
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    if let Some(&value) = cache.get(&"a") {
        println!("Got: {}", value);
    }
}

Creating an LRU Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    // Capacity must be non-zero; NonZeroUsize encodes that in the type.
    let cap = NonZeroUsize::new(10).unwrap();
    let mut cache = LruCache::<i32, String>::new(cap);

    println!("Capacity: {}", cache.cap());
    println!("Size: {}", cache.len());
    println!("Is empty: {}", cache.is_empty());

    // An unbounded cache never evicts — it can grow without limit!
    // let unbounded: LruCache<i32, String> = LruCache::unbounded();

    // A custom hash builder can be supplied via with_hasher.
    use std::collections::hash_map::RandomState;
    let cache_with_hasher: LruCache<i32, String, RandomState> =
        LruCache::with_hasher(cap, RandomState::new());
}

Basic Operations

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    // Insert entries
    cache.put("one", 1);
    cache.put("two", 2);
    cache.put("three", 3);

    println!("Size: {}", cache.len()); // 3

    // Retrieve entries (moves to most-recent position)
    if let Some(&value) = cache.get(&"one") {
        println!("Got 'one': {}", value);
    }

    // Check if key exists
    println!("Contains 'two': {}", cache.contains(&"two"));

    // Peek without updating LRU order
    if let Some(&value) = cache.peek(&"three") {
        println!("Peeked 'three': {}", value);
    }

    // Remove entry. Note: pop(&key) returns only Option<V>; to get the
    // (key, value) pair back, use pop_entry(&key). (The original
    // destructured pop's result as a tuple, which does not compile.)
    if let Some((key, value)) = cache.pop_entry(&"two") {
        println!("Removed {}: {}", key, value);
    }

    // Clear cache
    cache.clear();
    println!("After clear: {}", cache.len()); // 0
}

Understanding LRU Eviction

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());
    
    // Add three entries (capacity is 3)
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    println!("Initial: {}", cache.len()); // 3
    
    // Add fourth entry - "a" will be evicted (least recently used)
    let evicted = cache.push("d", 4);
    println!("Evicted: {:?}", evicted); // Some(("a", 1))
    println!("After push: {}", cache.len()); // 3
    
    // Access "b" - now it's most recent
    cache.get(&"b");
    
    // Add another entry - "c" will be evicted now
    let evicted = cache.push("e", 5);
    println!("Evicted: {:?}", evicted); // Some(("c", 3))
    
    // Current contents: b, d, e
    println!("\nRemaining entries:");
    for (key, value) in cache.iter() {
        println!("  {} -> {}", key, value);
    }
}

Get vs Peek

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());
    
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    // get() updates LRU order - "a" becomes most recent
    let _ = cache.get(&"a");
    
    // Order now: b (LRU), c, a (MRU)
    
    // peek() does NOT update order
    let _ = cache.peek(&"b");
    
    // Order still: b (LRU), c, a (MRU)
    
    // Adding new entry evicts "b" (the LRU)
    let evicted = cache.push("d", 4);
    println!("Evicted: {:?}", evicted); // Some(("b", 2))
    
    // peek_mut() for mutation without LRU update
    if let Some(value) = cache.peek_mut(&"c") {
        *value = 100;
    }
    println!("c after mutation: {:?}", cache.peek(&"c")); // Some(100)
}

Mutable Access

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<i32, Vec<i32>> = LruCache::new(NonZeroUsize::new(10).unwrap());
    
    cache.put(1, vec![1, 2, 3]);
    cache.put(2, vec![4, 5, 6]);
    
    // get_mut() updates LRU order and returns mutable reference
    if let Some(vec) = cache.get_mut(&1) {
        vec.push(4);
    }
    
    println!("Modified: {:?}", cache.get(&1));
    
    // peek_mut() returns mutable reference WITHOUT updating order
    if let Some(vec) = cache.peek_mut(&2) {
        vec.push(7);
    }
    
    println!("Modified without LRU update: {:?}", cache.peek(&2));
}

Iteration

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(5).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    cache.put("d", 4);
    cache.put("e", 5);

    // Iterate from most recent to least recent
    println!("Most recent to least recent:");
    for (key, value) in cache.iter() {
        println!("  {} -> {}", key, value);
    }

    // Iterate mutably
    println!("\nDouble all values:");
    for (key, value) in cache.iter_mut() {
        *value *= 2;
        println!("  {} -> {}", key, value);
    }

    // LruCache has no keys()/values() methods (unlike HashMap);
    // project them out of iter() instead.
    println!("\nKeys: {:?}", cache.iter().map(|(k, _)| k).collect::<Vec<_>>());
    println!("Values: {:?}", cache.iter().map(|(_, v)| v).collect::<Vec<_>>());
}

Conditional Insertion (get_or_insert)

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(5).unwrap());

    // NOTE: the lru crate has no HashMap-style entry() API.
    // get_or_insert() inserts the closure's value only on a cache miss.
    cache.get_or_insert("count", || 0);
    println!("Count: {:?}", cache.get(&"count"));

    // The closure is lazy: it runs only when the key is absent.
    cache.get_or_insert("computed", || {
        println!("Computing value...");
        42
    });

    // get_or_insert_mut() returns &mut V, so an existing (or freshly
    // inserted) value can be modified in place.
    let count = cache.get_or_insert_mut("count", || 0);
    *count += 1;
    println!("Count after increment: {:?}", cache.get(&"count"));

    // "Upsert" a counter: start at 0 on a miss, then increment.
    let counter = cache.get_or_insert_mut("counter", || 0);
    *counter += 1;
    println!("Counter: {:?}", cache.get(&"counter"));
}

Memoization Example

use lru::LruCache;
use std::num::NonZeroUsize;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
 
/// Simulates an expensive computation by sleeping briefly, then
/// returns n² + n.
fn expensive_computation(n: u64) -> u64 {
    std::thread::sleep(std::time::Duration::from_millis(10));
    n.pow(2) + n
}
 
/// Wraps `expensive_computation` with a bounded memoization cache.
struct MemoizedFunction {
    cache: LruCache<u64, u64>,
}

impl MemoizedFunction {
    /// Creates a memoizer that retains at most `capacity` results.
    /// Panics if `capacity` is zero.
    fn new(capacity: usize) -> Self {
        let cap = NonZeroUsize::new(capacity).unwrap();
        Self {
            cache: LruCache::new(cap),
        }
    }

    /// Returns the result for `n`, computing and caching it on a miss.
    fn compute(&mut self, n: u64) -> u64 {
        // .copied() releases the cache borrow before the miss path mutates it.
        match self.cache.get(&n).copied() {
            Some(cached) => cached,
            None => {
                let result = expensive_computation(n);
                self.cache.put(n, result);
                result
            }
        }
    }
}
 
fn main() {
    let mut memo = MemoizedFunction::new(100);

    // First call pays the full computation cost.
    let t0 = std::time::Instant::now();
    let r1 = memo.compute(42);
    println!("First call: {} ({:?})", r1, t0.elapsed());

    // Second call is served straight from the cache.
    let t1 = std::time::Instant::now();
    let r2 = memo.compute(42);
    println!("Cached call: {} ({:?})", r2, t1.elapsed());
}

HTTP Response Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::time::{Duration, Instant};
 
/// An HTTP response stored in the cache together with freshness metadata.
#[derive(Clone)]
struct CachedResponse {
    body: String,       // response payload
    status: u16,        // HTTP status code
    cached_at: Instant, // when the entry was stored
    ttl: Duration,      // how long the entry stays fresh
}

impl CachedResponse {
    /// Records `body` and `status` with a freshness window of `ttl`,
    /// timestamped at the moment of construction.
    fn new(body: String, status: u16, ttl: Duration) -> Self {
        let cached_at = Instant::now();
        Self {
            body,
            status,
            cached_at,
            ttl,
        }
    }

    /// True while the entry's age is still strictly within its TTL.
    fn is_valid(&self) -> bool {
        self.cached_at.elapsed() < self.ttl
    }
}
 
/// A bounded, TTL-aware cache of HTTP responses keyed by URL.
struct HttpCache {
    cache: LruCache<String, CachedResponse>,
}

impl HttpCache {
    /// Creates a cache for up to `capacity` responses. Panics if zero.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
        }
    }

    /// Returns the cached body for `url` if present and still fresh.
    /// A stale entry is dropped and treated as a miss.
    fn get(&mut self, url: &str) -> Option<String> {
        let body = self
            .cache
            .get(url)
            .filter(|resp| resp.is_valid())
            .map(|resp| resp.body.clone());
        if body.is_none() {
            // Drop any stale entry (pop on an absent key is a no-op).
            self.cache.pop(url);
        }
        body
    }

    /// Stores `body`/`status` for `url`, replacing any previous entry.
    fn set(&mut self, url: String, body: String, status: u16, ttl: Duration) {
        let entry = CachedResponse::new(body, status, ttl);
        self.cache.put(url, entry);
    }

    /// Removes the entry for `url`, if any.
    fn invalidate(&mut self, url: &str) {
        self.cache.pop(url);
    }
}
 
/// Stand-in for a real network call: logs the fetch and fabricates a
/// 200 response whose body embeds the URL.
fn mock_http_request(url: &str) -> (String, u16) {
    println!("Fetching {} from server...", url);
    let body = format!("Response from {}", url);
    (body, 200)
}
 
fn main() {
    let mut cache = HttpCache::new(10);
    let url = "https://api.example.com/users";

    // Cold cache: fall back to the (mock) network and store the result.
    match cache.get(url) {
        Some(response) => println!("Cached: {}", response),
        None => {
            let (body, status) = mock_http_request(url);
            cache.set(url.to_string(), body.clone(), status, Duration::from_secs(60));
            println!("Fetched: {}", body);
        }
    }

    // Warm cache: served without touching the network.
    if let Some(response) = cache.get(url) {
        println!("Cached: {}", response);
    }
}

Database Query Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// A user record as loaded from the (mock) database.
#[derive(Debug, Clone)]
struct User {
    id: u64,       // primary key; also used as the cache key
    name: String,  // display name
    email: String, // contact address
}
 
/// A bounded LRU cache of `User` records keyed by user id.
struct UserCache {
    cache: LruCache<u64, User>,
}

impl UserCache {
    /// Creates a cache for up to `capacity` users. Panics if `capacity` is zero.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
        }
    }

    /// Borrows the cached user, promoting the entry to most recently used.
    fn get(&mut self, id: u64) -> Option<&User> {
        self.cache.get(&id)
    }

    /// Clones the cached user out, promoting the entry.
    fn get_cloned(&mut self, id: u64) -> Option<User> {
        self.cache.get(&id).cloned()
    }

    /// Inserts or replaces the cached record under `user.id`.
    fn set(&mut self, user: User) {
        self.cache.put(user.id, user);
    }

    /// Removes and returns the cached user, if present.
    fn remove(&mut self, id: u64) -> Option<User> {
        // LruCache::pop returns Option<V> directly (pop_entry returns the
        // pair), so the original `.map(|(_, v)| v)` tuple destructure would
        // not have compiled.
        self.cache.pop(&id)
    }

    /// Returns the cached user for `id`, calling `loader` on a miss and
    /// caching its result.
    fn get_or_load<F>(&mut self, id: u64, loader: F) -> User
    where
        F: FnOnce(u64) -> User,
    {
        if let Some(user) = self.cache.get(&id) {
            return user.clone();
        }

        let user = loader(id);
        self.cache.put(id, user.clone());
        user
    }
}
 
/// Simulates a database lookup: logs the query and synthesizes a `User`
/// whose fields are derived from `id`.
fn load_user_from_db(id: u64) -> User {
    println!("Loading user {} from database...", id);
    let name = format!("User{}", id);
    let email = format!("user{}@example.com", id);
    User { id, name, email }
}
 
fn main() {
    let mut cache = UserCache::new(100);

    // Miss: the loader hits the "database".
    let user = cache.get_or_load(42, load_user_from_db);
    println!("Loaded: {:?}", user);

    // Hit: answered from the cache; the loader never runs.
    let user = cache.get_or_load(42, load_user_from_db);
    println!("Cached: {:?}", user);
}

Rate Limiting with LRU

use lru::LruCache;
use std::num::NonZeroUsize;
use std::time::{Duration, Instant};
 
/// Per-client request count within the current fixed rate-limit window.
struct RateLimitEntry {
    count: u32,            // requests seen since window_start
    window_start: Instant, // beginning of the current window
}
 
/// Fixed-window rate limiter. The LRU cache bounds memory by silently
/// evicting state for the least recently seen clients.
pub struct RateLimiter {
    entries: LruCache<String, RateLimitEntry>, // per-client window state
    max_requests: u32,                         // allowed requests per window
    window: Duration,                          // window length
}
 
impl RateLimiter {
    /// Creates a limiter allowing `max_requests` per `window`, tracking at
    /// most `cache_size` distinct clients (older clients are LRU-evicted).
    /// Panics if `cache_size` is zero.
    pub fn new(max_requests: u32, window: Duration, cache_size: usize) -> Self {
        let capacity = NonZeroUsize::new(cache_size).unwrap();
        Self {
            entries: LruCache::new(capacity),
            max_requests,
            window,
        }
    }

    /// Records one request for `client_id` and reports whether it falls
    /// within the limit for the current window.
    pub fn check(&mut self, client_id: &str) -> bool {
        let now = Instant::now();

        // Fetch this client's window state, creating it lazily on first sight.
        let entry = self
            .entries
            .get_or_insert_mut(client_id.to_string(), || RateLimitEntry {
                count: 0,
                window_start: now,
            });

        // Start a fresh window once the previous one has fully elapsed.
        if now.duration_since(entry.window_start) > self.window {
            entry.count = 0;
            entry.window_start = now;
        }

        entry.count += 1;
        entry.count <= self.max_requests
    }

    /// Number of requests `client_id` may still make in the current window.
    /// Uses peek(), so it neither counts as a request nor touches LRU order.
    pub fn remaining(&mut self, client_id: &str) -> u32 {
        let now = Instant::now();

        match self.entries.peek(client_id) {
            Some(entry) if now.duration_since(entry.window_start) <= self.window => {
                self.max_requests.saturating_sub(entry.count)
            }
            _ => self.max_requests,
        }
    }
}
 
fn main() {
    // Up to 5 requests per minute, tracking at most 1000 clients.
    let mut limiter = RateLimiter::new(5, Duration::from_secs(60), 1000);

    // Requests 6 and 7 exceed the limit.
    for attempt in 1..=7 {
        let allowed = limiter.check("client_123");
        let remaining = limiter.remaining("client_123");
        println!("Request {}: allowed={}, remaining={}", attempt, allowed, remaining);
    }
}

File Content Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::path::PathBuf;
use std::time::{Duration, Instant};
 
/// File text cached in memory together with freshness metadata.
struct FileContent {
    content: String,    // full file contents as read from disk
    loaded_at: Instant, // when the file was read
    ttl: Duration,      // how long the cached copy stays fresh
}
 
/// A bounded cache of file contents keyed by path, with per-entry TTL.
struct FileCache {
    cache: LruCache<PathBuf, FileContent>,
    ttl: Duration, // TTL applied to every newly cached file
}
 
impl FileCache {
    /// Creates a cache for up to `capacity` files, each fresh for `ttl`.
    /// Panics if `capacity` is zero.
    fn new(capacity: usize, ttl: Duration) -> Self {
        let cap = NonZeroUsize::new(capacity).unwrap();
        Self {
            cache: LruCache::new(cap),
            ttl,
        }
    }

    /// Returns the file's contents, served from memory while the cached
    /// entry is fresh and re-read from disk (and re-cached) otherwise.
    fn read(&mut self, path: &PathBuf) -> std::io::Result<String> {
        // Fresh cache hit: no disk access.
        match self.cache.get(path) {
            Some(cached) if cached.loaded_at.elapsed() < cached.ttl => {
                return Ok(cached.content.clone());
            }
            _ => {}
        }

        // Miss or stale: go to disk and refresh the entry.
        let content = std::fs::read_to_string(path)?;
        let entry = FileContent {
            content: content.clone(),
            loaded_at: Instant::now(),
            ttl: self.ttl,
        };
        self.cache.put(path.clone(), entry);

        Ok(content)
    }

    /// Drops the cached entry for `path`, forcing the next read to hit disk.
    fn invalidate(&mut self, path: &PathBuf) {
        self.cache.pop(path);
    }
}
 
fn main() -> std::io::Result<()> {
    // Set up a throwaway file to exercise the cache.
    let test_file = PathBuf::from("test_cache.txt");
    std::fs::write(&test_file, "Hello, Cache!")?;

    let mut cache = FileCache::new(10, Duration::from_secs(60));

    // Cold: this read goes to disk.
    println!("First read: {}", cache.read(&test_file)?);

    // Warm: served from memory.
    println!("Cached read: {}", cache.read(&test_file)?);

    // Remove the temporary file.
    std::fs::remove_file(&test_file)?;
    Ok(())
}

Concurrent LRU Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
 
/// A thread-safe LRU cache: a plain `LruCache` guarded by a `Mutex`.
pub struct ConcurrentLruCache<K, V> {
    inner: Mutex<LruCache<K, V>>, // every operation locks this mutex
}
 
impl<K: std::hash::Hash + Eq + Clone, V: Clone> ConcurrentLruCache<K, V> {
    /// Creates a cache holding at most `capacity` entries.
    /// Panics if `capacity` is zero.
    pub fn new(capacity: usize) -> Self {
        Self {
            inner: Mutex::new(LruCache::new(NonZeroUsize::new(capacity).unwrap())),
        }
    }

    /// Returns a clone of the value for `key`, promoting the entry.
    /// Panics if the lock is poisoned.
    pub fn get(&self, key: &K) -> Option<V> {
        self.inner.lock().unwrap().get(key).cloned()
    }

    /// Inserts `key -> value`, returning the previous value for `key` if any.
    pub fn put(&self, key: K, value: V) -> Option<V> {
        self.inner.lock().unwrap().put(key, value)
    }

    /// Returns the value for `key`, computing and caching it on a miss.
    /// The lock is held while `f` runs, so keep `f` cheap.
    pub fn get_or_insert<F>(&self, key: K, f: F) -> V
    where
        F: FnOnce() -> V,
    {
        let mut cache = self.inner.lock().unwrap();
        if let Some(value) = cache.get(&key) {
            return value.clone();
        }

        let value = f();
        cache.put(key, value.clone());
        value
    }

    /// Removes and returns the value for `key`, if present.
    pub fn remove(&self, key: &K) -> Option<V> {
        // LruCache::pop already returns Option<V>; the original
        // `.map(|(_, v)| v)` assumed a (key, value) pair and would not compile
        // (pop_entry is the method that returns the pair).
        self.inner.lock().unwrap().pop(key)
    }

    /// Current number of cached entries.
    pub fn len(&self) -> usize {
        self.inner.lock().unwrap().len()
    }
}
 
fn main() {
    let cache = Arc::new(ConcurrentLruCache::<i32, String>::new(100));

    // Four writer threads, ten inserts each, over disjoint key ranges.
    let mut handles = Vec::new();
    for t in 0..4 {
        let cache = Arc::clone(&cache);
        handles.push(std::thread::spawn(move || {
            for i in 0..10 {
                let key = t * 10 + i;
                cache.put(key, format!("value_{}", key));
            }
        }));
    }

    for handle in handles {
        handle.join().unwrap();
    }

    println!("Cache size: {}", cache.len());
}

Weighted LRU Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// A cached value paired with its weight (its share of the weight budget).
struct WeightedEntry<V> {
    value: V,
    weight: usize, // cost charged against WeightedLruCache::max_weight
}
 
/// An LRU cache bounded both by entry count (via the inner LruCache) and by
/// total weight; LRU entries are evicted until a new item fits the budget.
pub struct WeightedLruCache<K, V> {
    cache: LruCache<K, WeightedEntry<V>>,
    max_weight: usize,     // weight budget shared by all entries
    current_weight: usize, // sum of weights currently cached
}
 
impl<K: std::hash::Hash + Eq + Clone, V: Clone> WeightedLruCache<K, V> {
    pub fn new(capacity: usize, max_weight: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            max_weight,
            current_weight: 0,
        }
    }
    
    pub fn put(&mut self, key: K, value: V, weight: usize) -> Option<V> {
        // If key exists, subtract old weight
        if let Some(old) = self.cache.peek(&key) {
            self.current_weight -= old.weight;
        }
        
        // Evict entries until we have space
        while self.current_weight + weight > self.max_weight && self.cache.len() > 0 {
            if let Some((_, entry)) = self.cache.pop_lru() {
                self.current_weight -= entry.weight;
            }
        }
        
        self.current_weight += weight;
        self.cache.put(key, WeightedEntry { value, weight }).map(|e| e.value)
    }
    
    pub fn get(&mut self, key: &K) -> Option<&V> {
        self.cache.get(key).map(|e| &e.value)
    }
    
    pub fn current_weight(&self) -> usize {
        self.current_weight
    }
}
 
fn main() {
    // Up to 100 entries with a total weight budget of 1000.
    let mut cache = WeightedLruCache::new(100, 1000);

    cache.put("small", "tiny", 100);
    cache.put("medium", "medium value", 300);
    cache.put("large", "large value here", 500);

    println!("Current weight: {}", cache.current_weight());

    // 900 + 600 exceeds the budget, so LRU entries get evicted to make room.
    cache.put("huge", "huge value", 600);
    println!("After adding huge: weight = {}", cache.current_weight());

    println!("Small cached: {:?}", cache.get(&"small"));
    println!("Huge cached: {:?}", cache.get(&"huge"));
}

Two-Level Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// A two-tier cache: a small "hot" LRU in front of a larger "cold" LRU.
/// Reads that hit the cold tier are promoted (copied) into the hot tier.
pub struct TwoLevelCache<K, V> {
    hot: LruCache<K, V>,  // small tier for frequently accessed keys
    cold: LruCache<K, V>, // larger tier for everything else
}
 
impl<K: std::hash::Hash + Eq + Clone, V: Clone> TwoLevelCache<K, V> {
    /// Creates a cache with a hot tier of `hot_size` entries and a cold tier
    /// of `cold_size` entries. Panics if either size is zero.
    pub fn new(hot_size: usize, cold_size: usize) -> Self {
        Self {
            hot: LruCache::new(NonZeroUsize::new(hot_size).unwrap()),
            cold: LruCache::new(NonZeroUsize::new(cold_size).unwrap()),
        }
    }

    /// Looks `key` up in the hot tier first, then the cold tier. A cold hit
    /// is copied into the hot tier so subsequent reads are served there.
    pub fn get(&mut self, key: &K) -> Option<V> {
        // Check hot cache first.
        if let Some(value) = self.hot.get(key) {
            return Some(value.clone());
        }

        // Check cold cache; promote hits into the hot tier. (The entry also
        // remains in the cold tier, so a promoted key is counted by both.)
        if let Some(value) = self.cold.get(key) {
            let value = value.clone();
            self.hot.put(key.clone(), value.clone());
            return Some(value);
        }

        None
    }

    /// Inserts into the cold tier; keys earn promotion to hot via `get`.
    /// An entry evicted from the cold tier is simply discarded.
    /// (The original logged it with `println!("{:?}", evicted.0)`, which does
    /// not compile: `K` carries no `Debug` bound.)
    pub fn put(&mut self, key: K, value: V) {
        self.cold.push(key, value);
    }

    /// Total entries across both tiers (a promoted key counts twice).
    pub fn len(&self) -> usize {
        self.hot.len() + self.cold.len()
    }
}
 
fn main() {
    // Hot tier: 3 entries; cold tier: 10 entries.
    let mut cache = TwoLevelCache::<&str, String>::new(3, 10);

    // Insert items (they land in the cold tier).
    cache.put("a", String::from("value_a"));
    cache.put("b", String::from("value_b"));
    cache.put("c", String::from("value_c"));

    // Repeated reads of "a" promote it into the hot tier.
    for _ in 0..3 {
        let _ = cache.get(&"a");
    }

    println!("Total items: {}", cache.len());
}

Resizing Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<i32, &str> = LruCache::new(NonZeroUsize::new(5).unwrap());

    // Fill to capacity.
    for key in 0..5 {
        cache.put(key, "value");
    }
    println!("Size: {} (capacity: {})", cache.len(), cache.cap());

    // Shrinking evicts least-recently-used entries until the new cap fits.
    cache.resize(NonZeroUsize::new(3).unwrap());
    println!("After resize to 3: size = {}, capacity = {}", cache.len(), cache.cap());

    // Growing keeps every current entry and simply raises the limit.
    cache.resize(NonZeroUsize::new(10).unwrap());
    println!("After resize to 10: capacity = {}", cache.cap());
}

Cache Statistics

use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::atomic::{AtomicU64, Ordering};
 
/// An LRU cache that counts hits and misses for hit-rate reporting.
pub struct StatsLruCache<K, V> {
    cache: LruCache<K, V>,
    // NOTE(review): atomics are not strictly required here since every method
    // takes &mut self; plain u64 counters would suffice — confirm whether
    // shared (&self) access is planned.
    hits: AtomicU64,
    misses: AtomicU64,
}
 
impl<K: std::hash::Hash + Eq, V: Clone> StatsLruCache<K, V> {
    pub fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            hits: AtomicU64::new(0),
            misses: AtomicU64::new(0),
        }
    }
    
    pub fn get(&mut self, key: &K) -> Option<V> {
        match self.cache.get(key) {
            Some(value) => {
                self.hits.fetch_add(1, Ordering::Relaxed);
                Some(value.clone())
            }
            None => {
                self.misses.fetch_add(1, Ordering::Relaxed);
                None
            }
        }
    }
    
    pub fn put(&mut self, key: K, value: V) -> Option<V> {
        self.cache.put(key, value)
    }
    
    pub fn stats(&self) -> (u64, u64, f64) {
        let hits = self.hits.load(Ordering::Relaxed);
        let misses = self.misses.load(Ordering::Relaxed);
        let total = hits + misses;
        let hit_rate = if total > 0 { hits as f64 / total as f64 } else { 0.0 };
        (hits, misses, hit_rate)
    }
}
 
fn main() {
    let mut cache = StatsLruCache::<&str, String>::new(10);

    cache.put("key1", "value1".to_string());

    // Two hits on a present key...
    cache.get(&"key1");
    cache.get(&"key1");

    // ...and two misses on absent keys.
    cache.get(&"key2");
    cache.get(&"key3");

    let (hits, misses, hit_rate) = cache.stats();
    println!("Hits: {}, Misses: {}, Hit rate: {:.1}%", hits, misses, hit_rate * 100.0);
}

Summary

  • LruCache::new(capacity) creates a cache with bounded capacity (NonZeroUsize)
  • cache.put(key, value) inserts, evicting LRU entry if at capacity
  • cache.get(&key) retrieves and promotes entry to most-recent position
  • cache.peek(&key) retrieves without updating LRU order
  • cache.get_mut(&key) and cache.peek_mut(&key) for mutable access
  • cache.push(key, value) returns evicted entry if any
  • cache.pop(&key) removes and returns the value; cache.pop_entry(&key) returns the (key, value) pair
  • cache.iter() iterates from most-recent to least-recent
  • cache.get_or_insert(key, f) / get_or_insert_mut(key, f) cover conditional insertion (the crate has no HashMap-style entry API)
  • O(1) complexity for get, put, and eviction operations
  • Wrap in Mutex or use DashMap for concurrent access
  • Perfect for: memoization, HTTP caching, database query caching, rate limiting, file caching