How do I use an LRU cache in Rust?

Walkthrough

The lru crate provides a fast and simple LRU (Least Recently Used) cache implementation. An LRU cache evicts the least recently accessed items when the cache reaches capacity. This is useful for caching frequently accessed data with memory bounds, such as database query results, computed values, or web responses. The crate provides O(1) average time complexity for get, put, and evict operations using a HashMap combined with a doubly-linked list.

Key concepts:

  1. LruCache<K, V> — the cache type with key-value pairs
  2. Capacity — maximum number of items before eviction
  3. put()/get() — insert and retrieve items
  4. Automatic eviction — least recently used items evicted on capacity
  5. peek() — access without updating recency
  6. get_or_insert() — for conditional "get or compute" operations

Code Example

# Cargo.toml
[dependencies]
lru = "0.12"
use lru::LruCache;
 
fn main() {
    // Create cache with capacity of 3
    let mut cache: LruCache<&str, i32> = LruCache::new(3);
    
    // Insert items
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    // Cache is full, next insert evicts least recently used
    cache.put("d", 4);  // "a" is evicted
    
    println!("Contains 'a': {}", cache.contains(&"a")); // false
    println!("Contains 'b': {}", cache.contains(&"b")); // true
}

Basic Operations

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(5);
    
    // Insert items
    cache.put("one", 1);
    cache.put("two", 2);
    cache.put("three", 3);
    
    // Get item (returns Option<&V>)
    if let Some(&value) = cache.get(&"one") {
        println!("one = {}", value);
    }
    
    // Get mutates access order ("one" is now most recent)
    // Try pushing more items
    cache.put("four", 4);
    cache.put("five", 5);
    cache.put("six", 6); // Evicts "two" (least recently used)
    
    println!("Contains 'two': {}", cache.contains(&"two"));
    
    // Check size
    println!("Cache size: {}", cache.len());
    println!("Cache capacity: {}", cache.cap());
    
    // Clear cache
    cache.clear();
    println!("After clear: {}", cache.len());
}

Get and Get Mut

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, Vec<i32>> = LruCache::new(3);
    
    cache.put("numbers", vec![1, 2, 3]);
    
    // Immutable access
    if let Some(vec) = cache.get(&"numbers") {
        println!("Numbers: {:?}", vec);
    }
    
    // Mutable access
    if let Some(vec) = cache.get_mut(&"numbers") {
        vec.push(4);
        vec.push(5);
    }
    
    println!("Updated: {:?}", cache.get(&"numbers").unwrap());
    
    // Note: get() and get_mut() promote the item to most-recently-used
}

Peek Without Promoting

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(3);
    
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    // get() promotes to most recently used
    cache.get(&"a"); // Now: b (LRU), c, a (MRU)
    
    // peek() does NOT promote
    let value = cache.peek(&"b");
    println!("Peeked b: {:?}", value);
    
    // peek_mut() for mutable access without promotion
    if let Some(val) = cache.peek_mut(&"b") {
        *val += 10;
    }
    
    println!("After peek_mut: {:?}", cache.peek(&"b"));
    
    // b is still the LRU item
    cache.put("d", 4); // Evicts b
    
    println!("After inserting d:");
    println!("  b exists: {}", cache.contains(&"b"));
    println!("  a exists: {}", cache.contains(&"a"));
}

Push with Return

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(2);
    
    // push() returns the evicted item if any
    cache.put("a", 1);
    cache.put("b", 2);
    
    // This will evict "a" (LRU)
    let evicted = cache.push("c", 3);
    
    match evicted {
        Some((key, value)) => println!("Evicted: {} = {}", key, value),
        None => println!("Nothing evicted"),
    }
    
    // push() is like put() but returns the evicted item
    // Useful when you need to know what was evicted
    cache.put("d", 4);
    let evicted = cache.push("e", 5);
    println!("Evicted: {:?}", evicted);
}

Pop and Remove

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(5);
    
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    // pop() removes and returns the LRU item
    let lru_item = cache.pop();
    println!("Popped LRU: {:?}", lru_item); // Some(("a", 1))
    
    // pop_lru() is an alias for pop()
    let another = cache.pop_lru();
    println!("Popped another LRU: {:?}", another);
    
    // remove() removes a specific key
    cache.put("d", 4);
    cache.put("e", 5);
    
    let removed = cache.remove(&"c");
    println!("Removed c: {:?}", removed);
    
    println!("Remaining items: {}", cache.len());
}

Iteration

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(5);
    
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    cache.get(&"a"); // Promote "a" to MRU
    
    // Iterate from MRU to LRU
    println!("MRU to LRU:");
    for (key, value) in cache.iter() {
        println!("  {} = {}", key, value);
    }
    // Output: c, b, a (c is MRU after a was accessed and promoted... wait, let me trace:
    // put(a) -> a
    // put(b) -> b, a (b is MRU)
    // put(c) -> c, b, a (c is MRU)
    // get(a) -> a, c, b (a is MRU)
    // So iter order is: a, c, b
    
    // Iterate mutably
    println!("\nMutating all values:");
    for (key, value) in cache.iter_mut() {
        *value *= 10;
        println!("  {} = {}", key, value);
    }
    
    // Get all keys (MRU to LRU)
    let keys: Vec<_> = cache.keys().collect();
    println!("\nKeys: {:?}", keys);
    
    // Get all values
    let values: Vec<_> = cache.values().collect();
    println!("Values: {:?}", values);
}

Get or Insert

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(5);
    
    // get_or_insert - insert if missing
    let value = cache.get_or_insert("a", || 100);
    println!("Got or inserted: {}", value);
    
    // Already exists now
    let value = cache.get_or_insert("a", || 200);
    println!("Got existing: {}", value); // Still 100
    
    // get_or_insert_mut - returns mutable reference
    if let Some(val) = cache.get_or_insert_mut("b", || 0) {
        *val += 50;
    }
    
    println!("b: {:?}", cache.get(&"b"));
    
    // Using entry API with or_insert
    cache.put("c", 30);
    
    // Entry pattern for complex logic
    let key = "d";
    if !cache.contains(&key) {
        cache.put(key, compute_expensive_value());
    }
    
    // Or more idiomatically:
    cache.get_or_insert("e", || compute_expensive_value());
}
 
/// Stand-in for a costly computation; the log line makes cache misses visible.
fn compute_expensive_value() -> i32 {
    println!("Computing...");
    42
}

Capacity Management

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<i32, i32> = LruCache::new(3);
    
    // Current capacity
    println!("Capacity: {}", cache.cap());
    
    // Change capacity
    cache.resize(5);
    println!("After resize to 5: {}", cache.cap());
    
    // Add items
    for i in 0..5 {
        cache.put(i, i * 10);
    }
    println!("Size: {}", cache.len());
    
    // Shrink capacity (evicts LRU items if needed)
    cache.resize(2);
    println!("After resize to 2: size={}", cache.len());
    println!("Contains 0: {}", cache.contains(&0));
    println!("Contains 4: {}", cache.contains(&4));
    
    // Create with unbounded capacity (practically)
    let unbounded: LruCache<i32, i32> = LruCache::unbounded();
    println!("Unbounded capacity: {:?}", unbounded.cap());
}

Contains and Len

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(5);
    
    cache.put("a", 1);
    cache.put("b", 2);
    
    // Check if key exists
    println!("Contains 'a': {}", cache.contains(&"a"));
    println!("Contains 'c': {}", cache.contains(&"c"));
    
    // Number of items
    println!("Length: {}", cache.len());
    
    // Check if empty
    println!("Is empty: {}", cache.is_empty());
    
    // Clear all items
    cache.clear();
    println!("After clear: {}", cache.len());
    println!("Is empty: {}", cache.is_empty());
}

Iterating by Recency

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(5);
    
    cache.put("first", 1);
    cache.put("second", 2);
    cache.put("third", 3);
    
    // Access order: first (LRU), second, third (MRU)
    println!("Initial order (LRU to MRU):");
    for (k, v) in cache.iter().rev() {
        println!("  {} = {}", k, v);
    }
    
    // Access "first" - promotes to MRU
    cache.get(&"first");
    
    println!("\nAfter accessing 'first':");
    for (k, v) in cache.iter() {
        println!("  {} = {}", k, v);
    }
    // Order now: second (LRU), third, first (MRU)
}

Most and Least Recent

use lru::LruCache;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(5);
    
    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);
    
    // peek_lru() - get least recently used without promoting
    if let Some((key, value)) = cache.peek_lru() {
        println!("LRU item: {} = {}", key, value);
    }
    
    // peek_mru() - get most recently used without promoting
    if let Some((key, value)) = cache.peek_mru() {
        println!("MRU item: {} = {}", key, value);
    }
    
    // Promote 'a' to MRU
    cache.get(&"a");
    
    println!("\nAfter promoting 'a':");
    println!("LRU: {:?}", cache.peek_lru());
    println!("MRU: {:?}", cache.peek_mru());
}

Custom Types as Keys

use lru::LruCache;
use std::hash::Hash;
 
/// Composite cache key identifying one resource belonging to one user.
/// The Hash + Eq derives are what make it usable as an LruCache key.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
struct CacheKey {
    user_id: u32,
    resource: String,
}

impl CacheKey {
    /// Build a key from a user id and a borrowed resource name.
    fn new(user_id: u32, resource: &str) -> Self {
        CacheKey {
            user_id,
            resource: resource.to_owned(),
        }
    }
}
 
/// A cached resource payload.
#[derive(Debug, Clone)]
struct Resource {
    content: String, // resource body
    size: usize,     // NOTE(review): supplied by callers, not derived from `content` — confirm intended semantics
}
 
fn main() {
    // LruCache::new requires a NonZeroUsize capacity (lru 0.8+).
    use std::num::NonZeroUsize;

    let mut cache: LruCache<CacheKey, Resource> = LruCache::new(NonZeroUsize::new(10).unwrap());

    // Insert with composite key
    let key1 = CacheKey::new(1, "profile");
    cache.put(key1.clone(), Resource {
        content: "User 1 profile".to_string(),
        size: 100,
    });

    let key2 = CacheKey::new(1, "settings");
    cache.put(key2.clone(), Resource {
        content: "User 1 settings".to_string(),
        size: 50,
    });

    // Retrieval builds an equal key — lookup relies on the Hash + Eq derives.
    if let Some(resource) = cache.get(&CacheKey::new(1, "profile")) {
        println!("Found: {}", resource.content);
    }

    println!("Cache size: {}", cache.len());
}

Caching Computed Values

use lru::LruCache;
use std::time::Duration;
use std::thread;
 
/// Iteratively computes the n-th Fibonacci number, with an artificial
/// 100 ms delay so the benefit of caching is visible in the demo output.
fn expensive_computation(n: u32) -> u64 {
    println!("Computing fib({})...", n);
    thread::sleep(Duration::from_millis(100));

    match n {
        0 | 1 => u64::from(n),
        _ => {
            // Pretend this is expensive
            let (mut prev, mut curr) = (0u64, 1u64);
            for _ in 2..=n {
                let next = prev + curr;
                prev = curr;
                curr = next;
            }
            curr
        }
    }
}
 
/// Wraps an LRU cache around `expensive_computation`, tracking hit/miss counts.
struct CachedComputer {
    cache: LruCache<u32, u64>, // memoized results, keyed by input
    hits: u64,                 // lookups served from the cache
    misses: u64,               // lookups that required computation
}

impl CachedComputer {
    /// Create a computer whose cache holds at most `capacity` results.
    ///
    /// # Panics
    /// Panics if `capacity` is zero — LruCache::new requires a NonZeroUsize.
    fn new(capacity: usize) -> Self {
        use std::num::NonZeroUsize;
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).expect("capacity must be non-zero")),
            hits: 0,
            misses: 0,
        }
    }

    /// Return the cached result for `n`, computing and caching it on a miss.
    fn compute(&mut self, n: u32) -> u64 {
        if let Some(&result) = self.cache.get(&n) {
            self.hits += 1;
            println!("Cache hit for {}", n);
            return result;
        }

        self.misses += 1;
        let result = expensive_computation(n);
        self.cache.put(n, result);
        result
    }

    /// Current (hits, misses) counters.
    fn stats(&self) -> (u64, u64) {
        (self.hits, self.misses)
    }
}
 
fn main() {
    let mut computer = CachedComputer::new(3);

    // Same value twice (miss then hit), a new value (miss),
    // then the first value again (hit).
    for n in [10, 10, 20, 10] {
        let result = computer.compute(n);
        println!("Result: {}\n", result);
    }

    let (hits, misses) = computer.stats();
    println!("Cache stats: {} hits, {} misses", hits, misses);
}

Thread-Safe LRU Cache

use lru::LruCache;
use std::sync::{Arc, Mutex};
 
/// A minimal thread-safe LRU cache: a single Mutex guards the whole cache.
struct ThreadSafeCache<K, V> {
    cache: Mutex<LruCache<K, V>>,
}

impl<K, V> ThreadSafeCache<K, V>
where
    K: std::hash::Hash + Eq + Clone,
    V: Clone,
{
    /// # Panics
    /// Panics if `capacity` is zero — LruCache::new requires a NonZeroUsize.
    fn new(capacity: usize) -> Self {
        use std::num::NonZeroUsize;
        Self {
            cache: Mutex::new(LruCache::new(
                NonZeroUsize::new(capacity).expect("capacity must be non-zero"),
            )),
        }
    }

    /// Clone the value out so the lock is released before the caller uses it.
    fn get(&self, key: &K) -> Option<V> {
        let mut cache = self.cache.lock().unwrap();
        cache.get(key).cloned()
    }

    /// Insert, returning the previous value for `key` if there was one.
    fn put(&self, key: K, value: V) -> Option<V> {
        let mut cache = self.cache.lock().unwrap();
        cache.put(key, value)
    }

    /// Get the value for `key`, inserting `f()` first on a miss.
    /// Note: `f` runs while the lock is held — keep it cheap.
    fn get_or_insert<F>(&self, key: K, f: F) -> V
    where
        F: FnOnce() -> V,
    {
        let mut cache = self.cache.lock().unwrap();
        cache.get_or_insert(key, f).clone()
    }

    /// Remove `key`, returning its value if present.
    fn remove(&self, key: &K) -> Option<V> {
        let mut cache = self.cache.lock().unwrap();
        // LruCache::pop already returns Option<V> — there is no tuple to unpack.
        cache.pop(key)
    }

    fn len(&self) -> usize {
        let cache = self.cache.lock().unwrap();
        cache.len()
    }
}
 
fn main() {
    let cache = Arc::new(ThreadSafeCache::new(5));

    // Spawn three workers that each write and read back five entries.
    let workers: Vec<_> = (0..3)
        .map(|i| {
            let cache = Arc::clone(&cache);
            std::thread::spawn(move || {
                for j in 0..5 {
                    let key = format!("key-{}-{}", i, j);
                    cache.put(key.clone(), format!("value-{}", j));

                    if let Some(value) = cache.get(&key) {
                        println!("Thread {} got: {}", i, value);
                    }
                }
            })
        })
        .collect();

    for worker in workers {
        worker.join().unwrap();
    }

    println!("Final cache size: {}", cache.len());
}

Real-World: HTTP Response Cache

use lru::LruCache;
use std::time::{Duration, Instant};
 
/// A cached HTTP response plus the bookkeeping needed for TTL-based expiry.
#[derive(Clone)]
struct CachedResponse {
    body: String,
    status: u16,
    cached_at: Instant, // when the entry was stored
    ttl: Duration,      // how long the entry stays fresh
}

impl CachedResponse {
    /// Record a response together with the current time and its TTL.
    fn new(body: String, status: u16, ttl: Duration) -> Self {
        CachedResponse {
            body,
            status,
            cached_at: Instant::now(),
            ttl,
        }
    }

    /// True while the entry's age is still below its TTL.
    fn is_fresh(&self) -> bool {
        self.cached_at.elapsed() < self.ttl
    }
}
 
/// LRU-bounded HTTP response cache with per-entry TTL expiry.
struct HttpCache {
    cache: LruCache<String, CachedResponse>, // keyed by URL
    default_ttl: Duration,
}

impl HttpCache {
    /// # Panics
    /// Panics if `capacity` is zero — LruCache::new requires a NonZeroUsize.
    fn new(capacity: usize, default_ttl: Duration) -> Self {
        use std::num::NonZeroUsize;
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).expect("capacity must be non-zero")),
            default_ttl,
        }
    }

    /// Return the cached response for `url` if present and still fresh.
    /// A stale entry is removed and treated as a miss.
    fn get(&mut self, url: &str) -> Option<&CachedResponse> {
        // Decide freshness via peek() first: returning a borrow from get()
        // in one branch while mutating the cache in the other does not pass
        // the borrow checker, so the check happens before re-borrowing.
        let fresh = match self.cache.peek(url) {
            Some(response) => response.is_fresh(),
            None => return None,
        };

        if fresh {
            self.cache.get(url) // promotes the entry to most recently used
        } else {
            // Expired — drop it. lru's removal method is pop() (no `remove`).
            self.cache.pop(url);
            None
        }
    }

    /// Cache a response under the default TTL.
    fn set(&mut self, url: String, body: String, status: u16) {
        let response = CachedResponse::new(body, status, self.default_ttl);
        self.cache.put(url, response);
    }

    /// Cache a response with an explicit TTL.
    fn set_with_ttl(&mut self, url: String, body: String, status: u16, ttl: Duration) {
        let response = CachedResponse::new(body, status, ttl);
        self.cache.put(url, response);
    }

    /// Drop a single URL from the cache.
    fn invalidate(&mut self, url: &str) {
        self.cache.pop(url);
    }

    /// Evict every entry whose TTL has elapsed.
    fn clear_expired(&mut self) {
        // Collect keys first — the cache cannot be mutated while iterating it.
        let expired: Vec<String> = self
            .cache
            .iter()
            .filter(|(_, resp)| !resp.is_fresh())
            .map(|(url, _)| url.clone())
            .collect();

        for url in expired {
            self.cache.pop(&url);
        }
    }

    /// (current size, capacity). cap() returns NonZeroUsize, hence .get().
    fn stats(&self) -> (usize, usize) {
        (self.cache.len(), self.cache.cap().get())
    }
}
 
/// Simulates an HTTP GET: logs the fetch and fabricates a 200 response.
fn fetch_url(url: &str) -> (String, u16) {
    println!("Fetching {} from network...", url);
    let body = format!("Response body for {}", url);
    (body, 200)
}
 
fn main() {
    let mut cache = HttpCache::new(100, Duration::from_secs(60));
    let url = "https://api.example.com/users";

    // First request: nothing cached yet — fetch from the network and store.
    if cache.get(url).is_none() {
        let (body, status) = fetch_url(url);
        cache.set(url.to_string(), body, status);
    }

    // Second request: served straight from the cache.
    if let Some(response) = cache.get(url) {
        println!("Cache hit! Status: {}", response.status);
    }

    let (used, total) = cache.stats();
    println!("Cache: {}/{} items", used, total);
}

Real-World: Database Query Cache

use lru::LruCache;
use std::hash::{Hash, Hasher};
 
/// Identifies a query by table name plus its condition string.
/// The Hash/Eq derives make it usable as an LruCache key.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
struct QueryKey {
    table: String,
    conditions: String,
}

impl QueryKey {
    /// Build a key from borrowed table/condition strings.
    fn new(table: &str, conditions: &str) -> Self {
        QueryKey {
            table: table.to_owned(),
            conditions: conditions.to_owned(),
        }
    }
}
 
/// Caches query results keyed by (table, conditions), with hit/miss counters.
struct QueryCache {
    cache: LruCache<QueryKey, QueryResult>,
    hits: u64,
    misses: u64,
}

/// A materialized query result: the rows plus how long the query took.
#[derive(Debug, Clone)]
struct QueryResult {
    rows: Vec<String>,
    execution_time_ms: u64,
}

impl QueryCache {
    /// # Panics
    /// Panics if `capacity` is zero — LruCache::new requires a NonZeroUsize.
    fn new(capacity: usize) -> Self {
        use std::num::NonZeroUsize;
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).expect("capacity must be non-zero")),
            hits: 0,
            misses: 0,
        }
    }

    /// Return the cached result for `key`, or run `executor` and cache its result.
    fn get_or_execute<F>(&mut self, key: QueryKey, executor: F) -> QueryResult
    where
        F: FnOnce() -> QueryResult,
    {
        if let Some(result) = self.cache.get(&key) {
            self.hits += 1;
            println!("Cache hit for query on {}", key.table);
            return result.clone();
        }

        self.misses += 1;
        println!("Cache miss for query on {}", key.table);
        let result = executor();
        self.cache.put(key, result.clone());
        result
    }

    /// Drop every cached query that touches `table` (e.g. after a write).
    fn invalidate_table(&mut self, table: &str) {
        // Two passes: keys are collected first because the cache cannot be
        // mutated while it is being iterated.
        let keys_to_remove: Vec<QueryKey> = self
            .cache
            .iter()
            .filter(|(k, _)| k.table == table)
            .map(|(k, _)| k.clone())
            .collect();

        for key in keys_to_remove {
            // lru's removal method is pop() — the crate has no `remove`.
            self.cache.pop(&key);
        }
    }

    /// Snapshot of size/capacity plus hit statistics.
    fn stats(&self) -> CacheStats {
        CacheStats {
            size: self.cache.len(),
            // cap() returns NonZeroUsize; convert to the plain usize field.
            capacity: self.cache.cap().get(),
            hits: self.hits,
            misses: self.misses,
            hit_rate: if self.hits + self.misses > 0 {
                self.hits as f64 / (self.hits + self.misses) as f64
            } else {
                0.0
            },
        }
    }
}
 
/// Point-in-time cache metrics, printable via Display.
struct CacheStats {
    size: usize,     // entries currently cached
    capacity: usize, // maximum entries
    hits: u64,
    misses: u64,
    hit_rate: f64, // hits / (hits + misses); 0.0 when there were no lookups
}

impl std::fmt::Display for CacheStats {
    /// Render as a single human-readable line, hit rate as a percentage.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let pct = self.hit_rate * 100.0;
        write!(
            f,
            "Cache: {}/{}, Hits: {}, Misses: {}, Hit Rate: {:.1}%",
            self.size, self.capacity, self.hits, self.misses, pct
        )
    }
}
 
fn main() {
    let mut cache = QueryCache::new(50);

    // First execution: miss — the closure runs and the result is cached.
    let result1 = cache.get_or_execute(QueryKey::new("users", "id = 1"), || QueryResult {
        rows: vec!["user:1:Alice".to_string()],
        execution_time_ms: 50,
    });
    println!("Result: {:?}\n", result1.rows);

    // Identical key: hit — the closure is never invoked.
    let result2 = cache.get_or_execute(QueryKey::new("users", "id = 1"), || QueryResult {
        rows: vec!["user:1:Alice".to_string()],
        execution_time_ms: 50,
    });
    println!("Result: {:?}\n", result2.rows);

    // Different key: miss again.
    let result3 = cache.get_or_execute(QueryKey::new("posts", "user_id = 1"), || QueryResult {
        rows: vec!["post:1", "post:2"].into_iter().map(String::from).collect(),
        execution_time_ms: 75,
    });
    println!("Result: {:?}\n", result3.rows);

    // Writes to a table should invalidate its cached queries.
    cache.invalidate_table("users");
    println!("After invalidating 'users' table:");

    println!("Stats: {}", cache.stats());
}

Real-World: Memoization

use lru::LruCache;
 
/// Wraps a function with an LRU-bounded memoization cache.
struct Memoizer<K, V, F>
where
    K: std::hash::Hash + Eq + Clone,
    F: Fn(K) -> V,
{
    cache: LruCache<K, V>, // previously computed results
    compute: F,            // the function being memoized
}

impl<K, V, F> Memoizer<K, V, F>
where
    K: std::hash::Hash + Eq + Clone,
    V: Clone,
    F: Fn(K) -> V,
{
    /// # Panics
    /// Panics if `capacity` is zero — LruCache::new requires a NonZeroUsize.
    fn new(capacity: usize, compute: F) -> Self {
        use std::num::NonZeroUsize;
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).expect("capacity must be non-zero")),
            compute,
        }
    }

    /// Return the memoized value for `key`, computing it on first use.
    fn call(&mut self, key: K) -> V {
        if let Some(value) = self.cache.get(&key) {
            return value.clone();
        }

        // Miss: run the wrapped function and remember its result.
        let value = (self.compute)(key.clone());
        self.cache.put(key, value.clone());
        value
    }

    /// Number of cached results currently held.
    fn cache_size(&self) -> usize {
        self.cache.len()
    }
}
 
fn main() {
    // LruCache::new requires a NonZeroUsize capacity (lru 0.8+).
    use std::num::NonZeroUsize;

    // Memoize expensive string operations
    let mut uppercaser = Memoizer::new(100, |s: String| {
        println!("Computing uppercase for: {}", s);
        s.to_uppercase()
    });

    println!("Result: {}", uppercaser.call("hello".to_string()));
    println!("Result: {}", uppercaser.call("hello".to_string())); // Cached
    println!("Result: {}", uppercaser.call("world".to_string()));
    println!("Result: {}", uppercaser.call("hello".to_string())); // Still cached

    println!("Cache size: {}", uppercaser.cache_size());

    // Memoize a recursive function (factorial) with an explicit cache.
    let mut factorials: LruCache<u32, u64> = LruCache::new(NonZeroUsize::new(100).unwrap());

    fn factorial(n: u32, cache: &mut LruCache<u32, u64>) -> u64 {
        if let Some(&result) = cache.get(&n) {
            return result;
        }

        let result = if n <= 1 {
            1
        } else {
            factorial(n - 1, cache) * n as u64
        };

        cache.put(n, result);
        result
    }

    let fact_10 = factorial(10, &mut factorials);
    println!("10! = {}", fact_10);
    println!("Factorial cache size: {}", factorials.len());
}

Summary

  • Use LruCache::new(capacity) — the capacity is a NonZeroUsize — to create a cache with a maximum size
  • put() inserts items, evicting LRU if at capacity
  • get() retrieves items and promotes them to most-recently-used
  • peek() retrieves without promoting (doesn't affect eviction order)
  • get_or_insert() for "get or compute" pattern
  • iter() and iter_mut() iterate from MRU to LRU
  • pop_lru() removes and returns the least recently used item; pop(&key) removes a specific key
  • resize() changes capacity, evicting items if necessary
  • Combine with Mutex or Arc<Mutex> for thread safety
  • Perfect for: caching computed results, rate limiting, HTTP response caching, query caching
  • O(1) average time complexity for all operations