How do I implement an LRU cache in Rust?

Walkthrough

The lru crate provides a fast, efficient LRU (Least Recently Used) cache implementation in Rust. An LRU cache automatically evicts the least recently used items when it reaches capacity, making it ideal for caching scenarios where you want to limit memory usage while keeping frequently accessed items available. The crate uses a HashMap for O(1) lookups combined with a doubly-linked list for O(1) eviction.

Key features:

  1. O(1) operations — get, put, and eviction are all constant time
  2. Bounded capacity — automatically evicts when full
  3. get_mut — mutable access to cached values
  4. Iteration — iterate from most to least recently used
  5. Custom hasher — supports custom hash functions

Code Example

# Cargo.toml
[dependencies]
lru = "0.12"
use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());
    
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    println!("Cache: {:?}", cache);  // {c: 3, b: 2, a: 1}
    
    cache.put("d", 4);  // Evicts "a" (least recently used)
    
    println!("After inserting d: {:?}", cache);  // {d: 4, c: 3, b: 2}
    
    println!("Get b: {:?}", cache.get(&"b"));  // Some(2)
    println!("Cache after get: {:?}", cache);   // {b: 2, d: 4, c: 3}
}

Basic Usage

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    // Create cache with capacity of 5.
    let capacity = NonZeroUsize::new(5).unwrap();
    let mut cache: LruCache<String, i32> = LruCache::new(capacity);

    // Insert items (keys are owned Strings).
    cache.put("one".to_string(), 1);
    cache.put("two".to_string(), 2);
    cache.put("three".to_string(), 3);

    // Lookups go through Borrow, so a plain &str works for String keys —
    // no need to allocate a temporary String per lookup as in
    // `cache.get(&"one".to_string())`.
    if let Some(value) = cache.get("one") {
        println!("Got: {}", value);  // Got: 1
    }

    // Get mutably (also promotes the entry to most recently used).
    if let Some(value) = cache.get_mut("two") {
        *value *= 10;
        println!("Modified: {}", value);  // Modified: 20
    }

    // Check if contains (does not change the LRU order).
    println!("Contains 'three': {}", cache.contains("three"));

    // Peek without updating LRU order.
    if let Some(value) = cache.peek("one") {
        println!("Peeked: {}", value);  // Peeked: 1
    }

    // Current size vs. configured capacity.
    println!("Size: {}", cache.len());
    println!("Capacity: {}", cache.cap().get());

    // Remove a specific item; returns its value if it was present.
    let removed = cache.pop("two");
    println!("Removed: {:?}", removed);  // Some(20)

    println!("Size after pop: {}", cache.len());
}

LRU Eviction Behavior

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache = LruCache::<&str, i32>::new(NonZeroUsize::new(3).unwrap());

    // Fill to capacity; recency order is now [c, b, a].
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    println!("Initial cache:");
    dump(&cache);

    // Touching "a" promotes it to most recently used: [a, c, b].
    cache.get(&"a");

    println!("\nAfter accessing 'a':");
    dump(&cache);

    // "b" is now the coldest entry, so it is the one evicted: [d, a, c].
    cache.put("d", 4);

    println!("\nAfter inserting 'd' (b evicted):");
    dump(&cache);

    // The evicted key is gone for good.
    println!("\nContains 'b': {}", cache.contains(&"b"));  // false

    // One more insert pushes out "c": [e, d, a].
    cache.put("e", 5);

    println!("\nAfter inserting 'e' (c evicted):");
    dump(&cache);
}

// Prints every entry, most recently used first.
fn dump(cache: &LruCache<&str, i32>) {
    for (k, v) in cache.iter() {
        println!("  {} => {}", k, v);
    }
}

Return Values of put and push

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(2).unwrap());

    // put() returns the key's PREVIOUS value (Option<V>), not the evicted
    // entry. For a brand-new key it returns None.
    let old = cache.put("a", 1);
    println!("Old: {:?}", old);  // None

    let old = cache.put("b", 2);
    println!("Old: {:?}", old);  // None

    // When full, put() evicts the least recently used entry SILENTLY —
    // it still returns None here because "c" had no previous value.
    let old = cache.put("c", 3);
    println!("Old: {:?}", old);  // None ("a" was evicted silently)

    // Updating an existing key returns the old value.
    let old = cache.put("b", 20);
    println!("Old value: {:?}", old);  // Some(2)

    // push() is the variant that reports the evicted (or replaced)
    // (key, value) pair as Option<(K, V)>.
    let evicted = cache.push("d", 4);
    println!("Evicted by push: {:?}", evicted);  // Some(("c", 3))

    // Cache is now: {d: 4, b: 20}
    for (k, v) in cache.iter() {
        println!("{} => {}", k, v);
    }
}

Peek vs Get

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache = LruCache::<&str, i32>::new(NonZeroUsize::new(3).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    // Helper: print the keys in recency order (MRU first) on one line.
    let show_keys = |c: &LruCache<&str, i32>| {
        for (k, _) in c.iter() {
            print!("{} ", k);
        }
        println!();
    };

    println!("Order before operations:");
    show_keys(&cache);  // c b a

    // get() promotes the entry to most recently used.
    cache.get(&"a");
    println!("\nAfter get('a'):");
    show_keys(&cache);  // a c b

    // peek() reads without promoting.
    cache.peek(&"c");
    println!("\nAfter peek('c'):");
    show_keys(&cache);  // a c b (unchanged)

    // peek_mut() allows mutation, still without promoting.
    if let Some(value) = cache.peek_mut(&"b") {
        *value *= 10;
    }
    println!("\nAfter peek_mut('b'):");
    for (k, v) in cache.iter() {
        print!("{}=>{} ", k, v);  // a c b=>20
    }
    println!();
}

Iteration

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(5).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    cache.put("d", 4);
    cache.put("e", 5);

    // iter() walks from most recently used to least recently used.
    println!("Forward iteration (MRU to LRU):");
    for (key, value) in cache.iter() {
        println!("  {} => {}", key, value);
    }

    // iter_mut() yields mutable references to the values; iterating does
    // not change the recency order.
    println!("\nMutable iteration (doubling values):");
    for (key, value) in cache.iter_mut() {
        *value *= 2;
        println!("  {} => {}", key, value);
    }

    // LruCache has no dedicated keys()/values() methods; project the
    // (key, value) tuples from iter() instead.
    println!("\nKeys only:");
    for (key, _) in cache.iter() {
        println!("  {}", key);
    }

    println!("\nValues only:");
    for (_, value) in cache.iter() {
        println!("  {}", value);
    }
}

Capacity Management

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    println!("Size: {}", cache.len());
    println!("Capacity: {}", cache.cap().get());
    println!("Is empty: {}", cache.is_empty());

    // Increase capacity (nothing is evicted when growing).
    cache.resize(NonZeroUsize::new(5).unwrap());
    println!("\nAfter resize to 5:");
    println!("Capacity: {}", cache.cap().get());

    cache.put("d", 4);
    cache.put("e", 5);
    println!("Size: {}", cache.len());

    // Decrease capacity. resize() returns (), and silently drops the
    // least recently used entries beyond the new capacity — it does NOT
    // hand the evicted items back.
    cache.resize(NonZeroUsize::new(2).unwrap());
    println!("\nAfter resize to 2:");
    println!("Capacity: {}", cache.cap().get());
    println!("Remaining size: {}", cache.len());
    println!("Remaining entries (MRU first):");
    for (k, v) in cache.iter() {
        println!("  {} => {}", k, v);
    }

    // Clear cache (capacity is unchanged, contents are dropped).
    cache.clear();
    println!("\nAfter clear:");
    println!("Is empty: {}", cache.is_empty());
}

Push and Pop LRU

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    // push() inserts like put(), but its return type is Option<(K, V)>:
    // the evicted entry when full, or the replaced entry for an existing
    // key. It does NOT reject the new item.
    cache.push("a", 1);
    cache.push("b", 2);
    cache.push("c", 3);

    println!("After pushes:");
    for (k, v) in cache.iter() {
        println!("  {} => {}", k, v);
    }

    // When full, push evicts the least recently used entry and returns it.
    let evicted = cache.push("d", 4);
    println!("\nEvicted when full: {:?}", evicted);  // Some(("a", 1))

    // pop_lru() removes and returns the least recently used entry.
    let lru_item = cache.pop_lru();
    println!("\nPopped LRU: {:?}", lru_item);  // Some(("b", 2))

    // pop() removes a specific key and returns its value.
    // (The crate has no pop_most(); remove the MRU entry by key, or
    // inspect it first with cache.iter().next().)
    let popped = cache.pop(&"d");
    println!("Popped 'd': {:?}", popped);  // Some(4)

    println!("\nRemaining:");
    for (k, v) in cache.iter() {
        println!("  {} => {}", k, v);
    }
}

Custom Key Types

use lru::LruCache;
use std::num::NonZeroUsize;
 
// Composite cache key: identifies one resource of a given type for one
// user. Hash + Eq are required to key an LruCache; Clone lets callers
// keep a copy of the key after inserting it.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
struct CacheKey {
    user_id: u32,          // owning user
    resource_type: String, // e.g. "profile", "settings"
}
 
impl CacheKey {
    /// Convenience constructor that takes the resource type by reference
    /// and owns a copy internally.
    fn new(user_id: u32, resource_type: &str) -> Self {
        let resource_type = resource_type.to_owned();
        Self { user_id, resource_type }
    }
}
 
// Cached payload plus the caller-supplied time it was stored.
#[derive(Debug, Clone)]
struct CacheValue {
    data: String,
    timestamp: u64, // units are caller-defined; the examples pass arbitrary ticks
}
 
fn main() {
    let mut cache =
        LruCache::<CacheKey, CacheValue>::new(NonZeroUsize::new(100).unwrap());

    let key1 = CacheKey::new(1, "profile");
    let key2 = CacheKey::new(1, "settings");
    let key3 = CacheKey::new(2, "profile");

    // Populate the cache; clone the keys so they remain usable below.
    let entries = [
        (key1.clone(), "User 1 profile", 1000),
        (key2.clone(), "User 1 settings", 1001),
        (key3.clone(), "User 2 profile", 1002),
    ];
    for (key, data, timestamp) in entries {
        cache.put(
            key,
            CacheValue {
                data: data.to_string(),
                timestamp,
            },
        );
    }

    // Look up by key
    if let Some(value) = cache.get(&key1) {
        println!("Found: {:?}", value);
    }

    // Equal field values mean an equal key, so this lookup hits.
    let search_key = CacheKey::new(1, "profile");
    println!("Contains user 1 profile: {}", cache.contains(&search_key));

    // This combination was never inserted.
    let search_key = CacheKey::new(2, "settings");
    println!("Contains user 2 settings: {}", cache.contains(&search_key));
}

Custom Hasher

use lru::LruCache;
use std::hash::BuildHasherDefault;
use std::num::NonZeroUsize;
use twox_hash::XxHash64;
 
// Note: Add twox-hash to Cargo.toml for this example
// [dependencies]
// twox-hash = "1.6"
 
fn main() {
    // An unbounded cache keyed with XxHash64 instead of the default
    // SipHash hasher.
    let hasher = BuildHasherDefault::<XxHash64>::default();
    let mut cache: LruCache<&str, i32, _> = LruCache::unbounded_with_hasher(hasher);

    for (key, value) in [("a", 1), ("b", 2), ("c", 3)] {
        cache.put(key, value);
    }

    println!("Cache with custom hasher:");
    for (k, v) in cache.iter() {
        println!("  {} => {}", k, v);
    }
}
 
// Simple FNV hasher example (no external dependency)
mod fnv {
    use std::hash::{Hasher, BuildHasherDefault};

    /// BuildHasher usable with `HashMap::with_hasher` or the lru crate's
    /// `with_hasher` constructors.
    pub type FnvHasher = BuildHasherDefault<FnvHasherInner>;

    /// Streaming FNV-1a state.
    ///
    /// The state must be seeded with the offset basis ONCE per hasher,
    /// not once per `write` call: `Hasher::write` may be invoked several
    /// times for a single logical key (hashing a `&str`, for instance,
    /// writes its bytes and then a sentinel byte). The original version
    /// reset the state inside `write`, discarding all earlier input.
    pub struct FnvHasherInner(u64);

    const FNV_PRIME: u64 = 1099511628211;
    const FNV_OFFSET_BASIS: u64 = 14695981039346656037;

    impl Default for FnvHasherInner {
        fn default() -> Self {
            // Seed here so the first (and every later) write continues
            // from a valid FNV-1a state.
            FnvHasherInner(FNV_OFFSET_BASIS)
        }
    }

    impl Hasher for FnvHasherInner {
        fn finish(&self) -> u64 {
            self.0
        }

        fn write(&mut self, bytes: &[u8]) {
            // FNV-1a step: XOR the byte in, then multiply by the prime.
            for byte in bytes {
                self.0 ^= *byte as u64;
                self.0 = self.0.wrapping_mul(FNV_PRIME);
            }
        }
    }
}

Real-World Example: HTTP Response Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::time::{Duration, Instant};
 
// One cached HTTP response together with its expiry policy.
#[derive(Clone)]
struct CachedResponse {
    body: String,
    status: u16,
    headers: Vec<(String, String)>, // never populated in these examples
    cached_at: Instant, // creation time, used for TTL checks
    ttl: Duration,      // how long the entry stays fresh
}
 
impl CachedResponse {
    fn new(body: String, status: u16, ttl: Duration) -> Self {
        Self {
            body,
            status,
            headers: Vec::new(),
            cached_at: Instant::now(),
            ttl,
        }
    }
    
    fn is_expired(&self) -> bool {
        self.cached_at.elapsed() > self.ttl
    }
}
 
// Bounded HTTP response cache with hit/miss accounting.
struct HttpCache {
    cache: LruCache<String, CachedResponse>, // keyed by URL
    hits: u64,   // lookups answered from a fresh cached entry
    misses: u64, // lookups that found nothing usable
}
 
impl HttpCache {
    /// Creates a cache bounded to `capacity` responses.
    ///
    /// # Panics
    /// Panics if `capacity` is 0.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            hits: 0,
            misses: 0,
        }
    }

    /// Returns the fresh cached response for `url`, if any.
    ///
    /// Expired entries are evicted on access so they stop occupying
    /// capacity. (The original left them in place — and even promoted
    /// them to most-recently-used on every lookup.) A fresh entry counts
    /// as a hit; an absent or expired one counts as a miss.
    fn get(&mut self, url: &str) -> Option<&CachedResponse> {
        // peek() does not disturb the LRU order and releases its borrow
        // before we pop, avoiding a borrow conflict.
        let expired = matches!(self.cache.peek(url), Some(r) if r.is_expired());
        if expired {
            self.cache.pop(url);
        }
        if let Some(response) = self.cache.get(url) {
            self.hits += 1;
            return Some(response);
        }
        self.misses += 1;
        None
    }

    /// Stores `response` under `url`, evicting the LRU entry when full.
    fn put(&mut self, url: String, response: CachedResponse) {
        self.cache.put(url, response);
    }

    /// Drops the entry for `url`, if present.
    fn invalidate(&mut self, url: &str) {
        self.cache.pop(url);
    }

    /// Empties the cache. Counters are intentionally preserved.
    fn clear(&mut self) {
        self.cache.clear();
    }

    /// Returns (hits, misses, hit_rate) where hit_rate is in [0, 1]
    /// and 0.0 when no lookups have happened yet.
    fn stats(&self) -> (u64, u64, f64) {
        let total = self.hits + self.misses;
        let hit_rate = if total > 0 {
            self.hits as f64 / total as f64
        } else {
            0.0
        };
        (self.hits, self.misses, hit_rate)
    }
}
 
fn main() {
    let mut cache = HttpCache::new(100);

    // Seed the cache with a couple of fake API responses.
    let responses = [
        ("https://api.example.com/users/1", "{\"id\": 1, \"name\": \"Alice\"}"),
        ("https://api.example.com/users/2", "{\"id\": 2, \"name\": \"Bob\"}"),
    ];
    for (url, body) in responses {
        cache.put(
            url.to_string(),
            CachedResponse::new(body.to_string(), 200, Duration::from_secs(300)),
        );
    }

    // A fresh entry: served straight from the cache.
    if let Some(response) = cache.get("https://api.example.com/users/1") {
        println!("Cache hit: {}", response.body);
    }

    // Check stats
    let (hits, misses, hit_rate) = cache.stats();
    println!("Hits: {}, Misses: {}, Hit rate: {:.2}%", hits, misses, hit_rate * 100.0);
}

Real-World Example: Memoization Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::hash::Hash;
 
// Memoizer: pairs a bounded LRU cache with the closure that computes
// missing values. NOTE(review): trait bounds on a struct definition are
// usually better left to the impl block; they are kept here to preserve
// the declared shape.
struct MemoCache<K, V, F>
where
    K: Hash + Eq + Clone,
    F: Fn(K) -> V,
{
    cache: LruCache<K, V>, // bounded store of already-computed results
    compute: F,            // fallback used on a cache miss
}
 
impl<K, V, F> MemoCache<K, V, F>
where
    K: Hash + Eq + Clone,
    F: Fn(K) -> V,
{
    /// Creates a memoization cache holding at most `capacity` results.
    ///
    /// # Panics
    /// Panics if `capacity` is 0.
    fn new(capacity: usize, compute: F) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            compute,
        }
    }

    /// Returns the cached value for `key`, computing and caching it first
    /// on a miss.
    fn get(&mut self, key: K) -> &V {
        if !self.cache.contains(&key) {
            let value = (self.compute)(key.clone());
            // Insert under a clone so `key` stays usable for the lookup
            // below. (The original moved `key` into put() and then
            // borrowed it again — a use-after-move compile error.)
            self.cache.put(key.clone(), value);
        }
        self.cache
            .get(&key)
            .expect("value was just inserted or already present")
    }
}
 
fn main() {
    // Memoized Fibonacci (iterative under the hood, so each first call
    // is linear; repeat calls are O(1) cache lookups).
    let mut fib_cache = MemoCache::new(100, |n: u32| -> u64 {
        if n <= 1 {
            return n as u64;
        }
        let (mut prev, mut curr) = (0u64, 1u64);
        for _ in 2..=n {
            let next = prev + curr;
            prev = curr;
            curr = next;
        }
        curr
    });

    println!("fib(10) = {}", fib_cache.get(10));
    println!("fib(20) = {}", fib_cache.get(20));
    println!("fib(50) = {}", fib_cache.get(50));

    // Memoized string transformation.
    let mut transform_cache =
        MemoCache::new(50, |s: String| -> String { s.to_uppercase() });

    println!("Transform: {}", transform_cache.get("hello".to_string()));
    println!("Transform: {}", transform_cache.get("world".to_string()));
}

Real-World Example: Database Query Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
 
// Cache key for a query: table name plus the textual conditions.
// Hash + Eq make it usable as an LruCache key.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
struct QueryKey {
    table: String,
    conditions: String, // e.g. a WHERE-clause fragment like "age > 18"
}
 
// Result rows plus how long the original execution took.
#[derive(Debug, Clone)]
struct QueryResult {
    rows: Vec<String>,
    execution_time_ms: u64,
}
 
// LRU-backed query result cache with simple hit/miss accounting.
struct QueryCache {
    cache: LruCache<QueryKey, QueryResult>,
    queries_executed: u64, // executor closures actually run (cache misses)
    cache_hits: u64,       // lookups served from the cache
}
 
impl QueryCache {
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            queries_executed: 0,
            cache_hits: 0,
        }
    }
    
    fn execute<F>(&mut self, table: &str, conditions: &str, executor: F) -> QueryResult
    where
        F: FnOnce() -> QueryResult,
    {
        let key = QueryKey {
            table: table.to_string(),
            conditions: conditions.to_string(),
        };
        
        if let Some(result) = self.cache.get(&key) {
            self.cache_hits += 1;
            return result.clone();
        }
        
        self.queries_executed += 1;
        let result = executor();
        self.cache.put(key, result.clone());
        result
    }
    
    fn invalidate_table(&mut self, table: &str) {
        let keys_to_remove: Vec<_> = self.cache
            .iter()
            .filter(|(k, _)| k.table == table)
            .map(|(k, _)| k.clone())
            .collect();
        
        for key in keys_to_remove {
            self.cache.pop(&key);
        }
    }
    
    fn stats(&self) -> (u64, u64, u64) {
        (self.queries_executed, self.cache_hits, self.cache.len() as u64)
    }
}
 
fn main() {
    let mut cache = QueryCache::new(50);
    
    // Simulate queries
    let result = cache.execute("users", "age > 18", || QueryResult {
        rows: vec!["Alice".to_string(), "Bob".to_string()],
        execution_time_ms: 50,
    });
    println!("Query 1: {:?}", result);
    
    // Same query - cache hit
    let result = cache.execute("users", "age > 18", || QueryResult {
        rows: vec!["Should not execute".to_string()],
        execution_time_ms: 0,
    });
    println!("Query 2 (cached): {:?}", result);
    
    // Different conditions
    let result = cache.execute("users", "age < 18", || QueryResult {
        rows: vec!["Charlie".to_string()],
        execution_time_ms: 45,
    });
    println!("Query 3: {:?}", result);
    
    // Invalidate table
    cache.invalidate_table("users");
    
    let (executed, hits, size) = cache.stats();
    println!("Stats: {} executed, {} hits, {} cached", executed, hits, size);
}

Real-World Example: Thread-Safe LRU Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
use std::thread;
 
// Minimal thread-safe wrapper: a single Mutex guards the whole LruCache,
// so every operation serializes — including reads, which mutate the
// recency order and therefore need the lock anyway.
struct ThreadSafeCache<K, V> {
    cache: Mutex<LruCache<K, V>>,
}
 
impl<K, V> ThreadSafeCache<K, V>
where
    K: std::hash::Hash + Eq + Clone,
{
    /// Creates a cache bounded to `capacity` entries.
    ///
    /// # Panics
    /// Panics if `capacity` is 0; methods panic if the lock is poisoned.
    fn new(capacity: usize) -> Self {
        Self {
            cache: Mutex::new(LruCache::new(NonZeroUsize::new(capacity).unwrap())),
        }
    }

    /// Returns a clone of the cached value, promoting the entry to most
    /// recently used. Cloning lets the lock be released before returning.
    fn get(&self, key: &K) -> Option<V>
    where
        V: Clone,
    {
        // get() needs a mutable guard: it updates the recency order.
        let mut cache = self.cache.lock().unwrap();
        cache.get(key).cloned()
    }

    /// Inserts `key` -> `value`, returning the evicted (or replaced)
    /// (key, value) pair if there was one.
    ///
    /// Delegates to `LruCache::push`, whose return type is
    /// `Option<(K, V)>` and matches this signature; `LruCache::put`
    /// returns only `Option<V>` (the key's previous value), so the
    /// original `cache.put(key, value)` body did not type-check.
    fn put(&self, key: K, value: V) -> Option<(K, V)> {
        let mut cache = self.cache.lock().unwrap();
        cache.push(key, value)
    }

    /// True if `key` is currently cached (does not change the LRU order).
    fn contains(&self, key: &K) -> bool {
        // LruCache::contains takes &self, so no `mut` binding is needed.
        let cache = self.cache.lock().unwrap();
        cache.contains(key)
    }

    /// Number of entries currently cached.
    fn len(&self) -> usize {
        let cache = self.cache.lock().unwrap();
        cache.len()
    }
}
 
fn main() {
    let cache = Arc::new(ThreadSafeCache::new(100));
    let mut handles = Vec::new();

    // Five writers, each inserting twenty entries.
    for i in 0..5 {
        let cache = Arc::clone(&cache);
        handles.push(thread::spawn(move || {
            for j in 0..20 {
                cache.put(format!("key-{}-{}", i, j), format!("value-{}-{}", i, j));
            }
        }));
    }

    // Three readers racing the writers; hits depend on timing.
    for i in 0..3 {
        let cache = Arc::clone(&cache);
        handles.push(thread::spawn(move || {
            for j in 0..20 {
                let key = format!("key-{}-{}", j % 5, j);
                if let Some(value) = cache.get(&key) {
                    println!("Thread {} got: {}", i, value);
                }
            }
        }));
    }

    // Wait for every worker before reading the final size.
    for handle in handles {
        handle.join().unwrap();
    }

    println!("Final cache size: {}", cache.len());
}

Unbounded Cache

use lru::LruCache;
 
fn main() {
    // No capacity bound: nothing is ever evicted automatically.
    let mut cache: LruCache<&str, i32> = LruCache::unbounded();

    for i in 0..1000 {
        // Leak each String to obtain a &'static str key. Fine for a short
        // demo, but this memory is never reclaimed; real code would use
        // String keys instead.
        let key: &'static str = Box::leak(format!("key{}", i).into_boxed_str());
        cache.put(key, i);
    }

    println!("Unbounded cache size: {}", cache.len());

    // Recency order is still tracked even without a bound.
    cache.get(&"key0");

    println!("Most recently used after get:");
    for (k, _) in cache.iter().take(3) {
        println!("  {}", k);
    }

    // A bound can be applied later; excess LRU entries are dropped.
    cache.resize(std::num::NonZeroUsize::new(100).unwrap());
    println!("After resize to 100: {} items", cache.len());
}

Summary

  • Create LRU cache with LruCache::new(NonZeroUsize::new(capacity).unwrap())
  • Use put() to insert items — it returns the key's previous value (eviction happens silently); use push() to receive the evicted (key, value) pair
  • Use get() to retrieve and update LRU order, peek() to retrieve without updating
  • Use get_mut() and peek_mut() for mutable access
  • Eviction happens automatically when cache is at capacity
  • Use pop() to remove a specific item and pop_lru() to remove the least recently used one
  • Iterate with iter() or iter_mut(), ordered from most to least recently used
  • Use resize() to change capacity (shrinking silently evicts the least recently used items)
  • Use unbounded() for cache without capacity limit
  • Supports custom hashers via LruCache::with_hasher() and LruCache::unbounded_with_hasher()
  • Combine with Mutex or parking_lot::Mutex for thread-safe access
  • O(1) time complexity for all operations
  • Ideal for: HTTP caches, memoization, database query caches, any bounded cache scenario