How do I implement LRU caching with the lru crate in Rust?

Walkthrough

The lru crate provides a fast implementation of an LRU (Least Recently Used) cache. An LRU cache evicts the least recently accessed items when it reaches capacity, making it ideal for caching scenarios where you want to keep frequently accessed items. The crate offers O(1) average time complexity for insert, get, and remove operations. It supports iterating over entries, peeking at items without updating their access order, and provides both mutable and immutable access patterns. Note that the cache itself is not synchronized — for concurrent use, wrap it in a Mutex or RwLock (see the thread-safety section below).

Key concepts:

  1. Capacity — maximum number of items the cache can hold
  2. Eviction — automatically removes least recently used item when full
  3. Access order — getting or inserting moves item to "most recently used"
  4. Peek — access item without affecting its LRU position
  5. Iteration — iterate from most to least recently used

Code Example

# Cargo.toml
[dependencies]
lru = "0.12"
use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    // Build a cache that holds at most 3 entries.
    let mut cache: LruCache<&str, u32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    // Fill the cache to capacity.
    for (name, n) in [("apple", 1), ("banana", 2), ("cherry", 3)] {
        cache.put(name, n);
    }

    println!("Cache size: {}", cache.len());

    // A fourth insert pushes out the least recently used entry ("apple").
    cache.put("date", 4);

    // "apple" is gone now.
    assert_eq!(cache.get(&"apple"), None);
    println!("'apple' was evicted");

    // The remaining entries are still retrievable.
    assert_eq!(cache.get(&"banana"), Some(&2));
    println!("'banana' is still cached: {:?}", cache.get(&"banana"));
}

Basic Operations

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(5).unwrap());

    // Populate a few entries.
    for (k, v) in [("a", 1), ("b", 2), ("c", 3)] {
        cache.put(k, v);
    }

    println!("Size: {}, Capacity: {}", cache.len(), cache.cap().get());

    // get() also bumps the entry to most-recently-used.
    if let Some(value) = cache.get(&"a") {
        println!("Got 'a': {}", value);
    }

    // Membership checks (these do not touch the LRU order).
    println!("Contains 'b': {}", cache.contains(&"b"));
    println!("Contains 'x': {}", cache.contains(&"x"));

    // pop() removes an entry and hands back its value.
    let removed = cache.pop(&"b");
    println!("Removed 'b': {:?}", removed);
    println!("Size after removal: {}", cache.len());

    // Drop everything at once.
    cache.clear();
    println!("Size after clear: {}", cache.len());
}

LRU Eviction Order

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    // iter() walks entries from most to least recently used.
    println!("Initial order (most to least recent):");
    for (key, val) in cache.iter() {
        println!("  {} -> {}", key, val);
    }

    // Touch "a" so it becomes the most recent entry.
    cache.get(&"a");

    println!("\nAfter accessing 'a':");
    for (key, val) in cache.iter() {
        println!("  {} -> {}", key, val);
    }

    // The cache is full, so this insert evicts the LRU entry ("b").
    cache.put("d", 4);

    println!("\nAfter inserting 'd':");
    for (key, val) in cache.iter() {
        println!("  {} -> {}", key, val);
    }

    println!("\n'b' evicted: {}", !cache.contains(&"b"));
}

Peek Without Updating Order

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    // Current order: c (most recent), b, a (least recent).

    // peek() reads a value but leaves the LRU order untouched.
    println!("Peeked at 'a': {:?}", cache.peek(&"a"));

    // Order is unchanged: still c, b, a.
    println!("\nOrder after peek:");
    for (key, _) in cache.iter() {
        println!("  {}", key);
    }

    // get() reads the value AND promotes "a" to most recent.
    println!("\nGot 'a': {:?}", cache.get(&"a"));

    // Order is now: a, c, b.
    println!("\nOrder after get:");
    for (key, _) in cache.iter() {
        println!("  {}", key);
    }
}

Mutable Access

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, Vec<i32>> = LruCache::new(NonZeroUsize::new(3).unwrap());

    cache.put("list", vec![1, 2, 3]);

    // get_mut() hands out &mut Vec and bumps the entry to most recent.
    if let Some(items) = cache.get_mut(&"list") {
        items.extend([4, 5]);
    }

    println!("Updated list: {:?}", cache.get(&"list"));

    // peek_mut() also yields &mut, but leaves the LRU order alone.
    if let Some(items) = cache.peek_mut(&"list") {
        items.push(6);
    }

    println!("After peek_mut: {:?}", cache.peek(&"list"));
}

Get or Insert Pattern

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, String> = LruCache::new(NonZeroUsize::new(3).unwrap());

    // The closure runs only on a cache miss.
    let value = cache.get_or_insert("key", || {
        println!("Computing value for 'key'...");
        "computed value".to_string()
    });
    println!("Got: {}", value);

    // Cache hit: the stored value is returned and the closure never runs.
    let value = cache.get_or_insert("key", || {
        println!("This won't be printed");
        "new value".to_string()
    });
    println!("Got again: {}", value);

    // get_or_insert_mut gives mutable access to the (possibly new) value.
    let entry = cache.get_or_insert_mut("another", || "default".to_string());
    entry.push_str(" - modified");
    println!("Modified: {}", cache.get(&"another").unwrap());
}

Iteration

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    // Values are owned Strings so iter_mut() can rewrite them in place.
    // (The original stored &str and did `*value = &format!(...)`, which does
    // not compile: the freshly formatted String is a temporary that is
    // dropped at the end of the statement, so no &str into it can be kept.)
    let mut cache: LruCache<i32, String> = LruCache::new(NonZeroUsize::new(5).unwrap());

    cache.put(1, "one".to_string());
    cache.put(2, "two".to_string());
    cache.put(3, "three".to_string());
    cache.put(4, "four".to_string());
    cache.put(5, "five".to_string());

    // Iterate from most to least recently used
    println!("Iter (most to least recent):");
    for (key, value) in cache.iter() {
        println!("  {} -> {}", key, value);
    }

    // Iterate mutably: replace each value with "v-v".
    println!("\nIter mut (doubling values): ");
    for (_key, value) in cache.iter_mut() {
        *value = format!("{}-{}", value, value);
    }

    for (k, v) in cache.iter() {
        println!("  {} -> {}", k, v);
    }
}

Reverse Iteration

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(5).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    // iter() walks most -> least recently used.
    println!("Forward (most to least): ");
    for (key, val) in cache.iter() {
        print!("{}:{} ", key, val);
    }

    // rev() flips the walk to least -> most recently used.
    println!("\n\nBackward (least to most): ");
    for (key, val) in cache.iter().rev() {
        print!("{}:{} ", key, val);
    }
    println!();
}

Get LRU Entry

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    // peek_lru() exposes the entry next in line for eviction.
    if let Some((key, value)) = cache.peek_lru() {
        println!("Least recently used: {} -> {}", key, value);
    }

    // Peeking never changes the recency order.
    cache.peek_lru();

    // pop_lru() removes and returns that same entry.
    if let Some((key, value)) = cache.pop_lru() {
        println!("Popped LRU: {} -> {}", key, value);
    }

    println!("Remaining items: {}", cache.len());
}

Get Most Recently Used

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    // peek_mru() mirrors peek_lru(): it shows the entry touched most
    // recently, without changing the order.
    // NOTE(review): peek_mru is only present in newer lru releases — confirm
    // the pinned crate version exposes it.
    if let Some((key, value)) = cache.peek_mru() {
        println!("Most recently used: {} -> {}", key, value);
    }
}

Resizing

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(5).unwrap());

    for (k, v) in [("a", 1), ("b", 2), ("c", 3), ("d", 4), ("e", 5)] {
        cache.put(k, v);
    }

    println!("Size: {}", cache.len());

    // Shrinking below the current length evicts LRU entries until it fits.
    cache.resize(NonZeroUsize::new(3).unwrap());

    println!("After resize to 3: size = {}", cache.len());
    println!("Remaining items:");
    for (k, v) in cache.iter() {
        println!("  {} -> {}", k, v);
    }

    // Growing never evicts; it only raises the ceiling.
    cache.resize(NonZeroUsize::new(10).unwrap());
    println!("\nAfter resize to 10: capacity = {}", cache.cap().get());
}

Contains and Is Empty

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    println!("Is empty: {}", cache.is_empty());

    cache.put("a", 1);

    println!("Is empty: {}", cache.is_empty());
    println!("Contains 'a': {}", cache.contains(&"a"));
    println!("Contains 'b': {}", cache.contains(&"b"));

    // Fill the remaining slots.
    cache.put("b", 2);
    cache.put("c", 3);

    // Unlike get(), contains() leaves the recency order untouched,
    // so 'a' stays least recently used.
    cache.contains(&"a");

    cache.put("d", 4); // evicts 'a' (least recently used)

    println!("\nAfter inserting 'd':");
    println!("Contains 'a': {}", cache.contains(&"a"));
}

Real-World: Function Result Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// Memoizing Fibonacci calculator backed by a bounded LRU cache.
struct FibonacciCache {
    // Maps n -> fib(n); least recently used entries are evicted at capacity.
    cache: LruCache<u32, u64>,
}
 
impl FibonacciCache {
    /// Creates a calculator whose memo table holds at most `capacity` entries.
    fn new(capacity: usize) -> Self {
        let cap = NonZeroUsize::new(capacity).unwrap();
        Self { cache: LruCache::new(cap) }
    }

    /// Returns fib(n), reusing memoized results where possible.
    fn fib(&mut self, n: u32) -> u64 {
        // Fast path: already computed (get also marks it recently used).
        if let Some(&hit) = self.cache.get(&n) {
            return hit;
        }

        // Slow path: recurse, then memoize.
        let value = match n {
            0 | 1 => n as u64,
            _ => self.fib(n - 1) + self.fib(n - 2),
        };
        self.cache.put(n, value);
        value
    }
}
 
fn main() {
    let mut calc = FibonacciCache::new(100);

    // Larger calls reuse results memoized by the earlier, smaller ones.
    println!("fib(10) = {}", calc.fib(10));
    println!("fib(20) = {}", calc.fib(20));
    println!("fib(30) = {}", calc.fib(30));
    println!("fib(40) = {}", calc.fib(40));
    println!("fib(50) = {}", calc.fib(50));

    // This repeat call is answered straight from the cache.
    println!("fib(50) again = {}", calc.fib(50));
}

Real-World: HTTP Response Cache

use lru::LruCache;
use std::num::NonZeroUsize;
use std::time::{Duration, Instant};
 
/// A cached HTTP response body plus the bookkeeping needed for expiry.
#[derive(Clone)]
struct CachedResponse {
    // Response payload as received.
    body: String,
    // When the entry was stored.
    cached_at: Instant,
    // How long the entry stays valid after `cached_at`.
    ttl: Duration,
}

impl CachedResponse {
    /// Wraps `body` with a freshly stamped creation time and the given TTL.
    fn new(body: String, ttl: Duration) -> Self {
        let cached_at = Instant::now();
        Self { body, cached_at, ttl }
    }

    /// True while the entry's age is still below its TTL.
    fn is_valid(&self) -> bool {
        self.cached_at.elapsed() < self.ttl
    }
}
 
/// LRU-bounded cache of HTTP responses with per-entry TTL expiry.
struct HttpCache {
    // url -> cached response; least recently used URLs are evicted first.
    cache: LruCache<String, CachedResponse>,
}
 
impl HttpCache {
    /// Creates a cache able to hold `capacity` responses.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
        }
    }

    /// Returns the cached body for `url` if present and unexpired.
    /// Expired entries are evicted eagerly on lookup.
    fn get(&mut self, url: &str) -> Option<String> {
        // peek() first so an expired entry is never promoted.
        let still_valid = match self.cache.peek(url) {
            Some(entry) => entry.is_valid(),
            None => return None,
        };

        if still_valid {
            // A real hit: use get() so the entry is marked recently used.
            self.cache.get(url).map(|r| r.body.clone())
        } else {
            // Expired: drop the entry and report a miss.
            self.cache.pop(url);
            None
        }
    }

    /// Stores `body` for `url`, valid for `ttl` from now.
    fn set(&mut self, url: String, body: String, ttl: Duration) {
        self.cache.put(url, CachedResponse::new(body, ttl));
    }
}
 
/// Stand-in for a real HTTP client: fabricates a response for `url`.
fn fetch_url(url: &str) -> String {
    let mut response = String::from("Response from ");
    response.push_str(url);
    response
}
 
fn main() {
    let mut cache = HttpCache::new(10);
    let url = "https://example.com/api".to_string();

    // First lookup misses, so fetch and populate the cache.
    match cache.get(&url) {
        Some(response) => println!("Cache hit: {}", response),
        None => {
            println!("Cache miss, fetching...");
            let response = fetch_url(&url);
            cache.set(url.clone(), response.clone(), Duration::from_secs(60));
            println!("Response: {}", response);
        }
    }

    // Second lookup hits the freshly stored entry.
    if let Some(response) = cache.get(&url) {
        println!("Cache hit: {}", response);
    }
}

Real-World: Database Query Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// A user record as it would come back from the database.
#[derive(Debug, Clone)]
struct User {
    id: u32, // primary key; also used as the cache key
    name: String,
    email: String,
}
 
/// Bounded LRU cache of User records keyed by user id.
struct UserCache {
    cache: LruCache<u32, User>,
}
 
impl UserCache {
    /// Creates a cache that keeps at most `capacity` users.
    fn new(capacity: usize) -> Self {
        let cap = NonZeroUsize::new(capacity).unwrap();
        Self { cache: LruCache::new(cap) }
    }

    /// Looks up a user, promoting them to most recently used.
    fn get(&mut self, id: u32) -> Option<&User> {
        self.cache.get(&id)
    }

    /// Mutable lookup; also promotes the entry.
    fn get_mut(&mut self, id: u32) -> Option<&mut User> {
        self.cache.get_mut(&id)
    }

    /// Inserts (or replaces) a user keyed by their id.
    fn put(&mut self, user: User) {
        let id = user.id;
        self.cache.put(id, user);
    }

    /// Evicts a user by id, returning the record if it was present.
    fn remove(&mut self, id: u32) -> Option<User> {
        self.cache.pop(&id)
    }

    /// Existence check that leaves the LRU order untouched.
    fn contains(&self, id: u32) -> bool {
        self.cache.contains(&id)
    }
}
 
/// Stand-in for a database query: fabricates a user record for `id`.
fn fetch_user_from_db(id: u32) -> User {
    let name = format!("User {}", id);
    let email = format!("user{}@example.com", id);
    User { id, name, email }
}
 
fn main() {
    let mut cache = UserCache::new(100);
    let user_id = 1;

    // First access: miss, so load from the "database" and cache the result.
    let user = match cache.get(user_id).cloned() {
        Some(found) => found,
        None => {
            println!("Cache miss for user {}, fetching from DB...", user_id);
            let fresh = fetch_user_from_db(user_id);
            cache.put(fresh.clone());
            fresh
        }
    };
    println!("Got user: {:?}", user);

    // Second access: hit.
    let user = cache.get(user_id).unwrap();
    println!("Got user from cache: {}", user.name);

    // In-place update through get_mut.
    if let Some(user) = cache.get_mut(user_id) {
        user.name = "Updated Name".to_string();
    }
    println!("Updated user: {:?}", cache.get(user_id));
}

Real-World: Image Thumbnail Cache

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// A generated thumbnail image (3 bytes per pixel).
struct Thumbnail {
    width: u32,
    height: u32,
    data: Vec<u8>,
}

impl Thumbnail {
    /// Fabricates a blank thumbnail of the given dimensions.
    fn new(width: u32, height: u32) -> Self {
        // 3 bytes per pixel, zero-initialized as placeholder pixel data.
        let byte_count = (width * height * 3) as usize;
        Self {
            width,
            height,
            data: vec![0u8; byte_count],
        }
    }
}
 
/// Thumbnail cache with hit/miss statistics.
struct ThumbnailCache {
    // key "{path}:{w}x{h}" -> generated thumbnail
    cache: LruCache<String, Thumbnail>,
    hits: u64,   // lookups served from cache
    misses: u64, // lookups that required generating a thumbnail
}
 
impl ThumbnailCache {
    /// Creates a cache holding at most `capacity` thumbnails.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            hits: 0,
            misses: 0,
        }
    }

    /// Returns the cached thumbnail for (image_path, width, height),
    /// generating and caching it on first request. Updates hit/miss stats.
    ///
    /// Bug fix: the original `if` branch ended in a statement
    /// (`self.cache.get(&key).unwrap();`), so it evaluated to `()` while the
    /// `else` branch produced `&Thumbnail` — a type mismatch that failed to
    /// compile. Recording the stats first and doing a single `get` at the
    /// end fixes that and also drops the redundant second lookup on hits.
    fn get_or_generate(&mut self, image_path: &str, width: u32, height: u32) -> &Thumbnail {
        let key = format!("{}:{}x{}", image_path, width, height);

        if self.cache.contains(&key) {
            self.hits += 1;
        } else {
            self.misses += 1;
            self.cache.put(key.clone(), Thumbnail::new(width, height));
        }

        // The entry is guaranteed present: it either already existed or was
        // just inserted, and nothing has been evicted since.
        self.cache.get(&key).expect("entry present or just inserted")
    }

    /// Returns the (hits, misses) counters accumulated so far.
    fn stats(&self) -> (u64, u64) {
        (self.hits, self.misses)
    }
}
 
fn main() {
    let mut cache = ThumbnailCache::new(50);

    // First request generates and caches the thumbnail.
    let thumb1 = cache.get_or_generate("photo.jpg", 100, 100);
    println!("Thumbnail 1: {}x{}", thumb1.width, thumb1.height);

    // Same path and size: served from the cache (a hit).
    let thumb2 = cache.get_or_generate("photo.jpg", 100, 100);
    println!("Thumbnail 2: {}x{}", thumb2.width, thumb2.height);

    // A different size is a distinct key, so it is generated again.
    let thumb3 = cache.get_or_generate("photo.jpg", 200, 200);
    println!("Thumbnail 3: {}x{}", thumb3.width, thumb3.height);

    let (hits, misses) = cache.stats();
    println!("Cache stats: {} hits, {} misses", hits, misses);
}

Real-World: Rate Limiter

use lru::LruCache;
use std::num::NonZeroUsize;
use std::time::{Duration, Instant};
 
/// Sliding-window rate limiter: at most `max_requests` per `window` for each
/// key. Keys are tracked in an LRU cache so memory stays bounded.
struct RateLimiter {
    // key -> timestamps of recent requests inside the window
    requests: LruCache<String, Vec<Instant>>,
    max_requests: usize,
    window: Duration,
}
 
impl RateLimiter {
    /// `capacity`: how many distinct keys to track at once;
    /// `max_requests` per `window` are allowed for each key.
    fn new(capacity: usize, max_requests: usize, window: Duration) -> Self {
        Self {
            requests: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            max_requests,
            window,
        }
    }

    /// Records a request for `key` and reports whether it is within the
    /// sliding-window limit.
    fn is_allowed(&mut self, key: &str) -> bool {
        let now = Instant::now();

        // Get or create the request log for this key.
        let times = self.requests.get_or_insert_mut(key.to_string(), Vec::new);

        // Drop requests older than the window. checked_sub avoids the panic
        // that `now - self.window` can raise when the process has been
        // running for less than `window` on platforms where Instant starts
        // near zero; if the subtraction underflows, nothing is old enough
        // to expire yet.
        if let Some(cutoff) = now.checked_sub(self.window) {
            times.retain(|&t| t > cutoff);
        }

        // Admit the request only if the key is still under its limit.
        if times.len() < self.max_requests {
            times.push(now);
            true
        } else {
            false
        }
    }
}
 
fn main() {
    // Allow up to 3 requests per key within a 60-second window.
    let mut limiter = RateLimiter::new(1000, 3, Duration::from_secs(60));
    let user = "user_123";

    // The fourth request in the same window gets rejected.
    for attempt in 1..=4 {
        if limiter.is_allowed(user) {
            println!("Request {} allowed", attempt);
        } else {
            println!("Request {} rate limited!", attempt);
        }
    }
}

Thread Safety with Mutex

use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
use std::thread;
 
fn main() {
    // LruCache is not internally synchronized, so share it behind a Mutex.
    let cache = Arc::new(Mutex::new(
        LruCache::<&str, String>::new(NonZeroUsize::new(100).unwrap())
    ));

    let handles: Vec<_> = (0..5)
        .map(|i| {
            let cache = Arc::clone(&cache);
            thread::spawn(move || {
                let key = "shared_key";

                // Fast path: another thread may already have filled the
                // entry. Keep the lock scope tight so others aren't blocked.
                {
                    let mut cache = cache.lock().unwrap();
                    if let Some(value) = cache.get(key) {
                        println!("Thread {} got cached: {}", i, value);
                        return;
                    }
                }

                // Simulate expensive computation (done outside the lock).
                let value = format!("computed_by_thread_{}", i);
                println!("Thread {} computed: {}", i, value);

                // Store in cache. `value` is moved in: the original called
                // `value.clone()` here, but the value is never used again,
                // so the clone was a wasted allocation.
                {
                    let mut cache = cache.lock().unwrap();
                    cache.put(key, value);
                }
            })
        })
        .collect();

    for handle in handles {
        handle.join().unwrap();
    }

    // Show final cache state
    let cache = cache.lock().unwrap();
    println!("\nFinal cache:");
    for (k, v) in cache.iter() {
        println!("  {} -> {}", k, v);
    }
}

Custom Types as Keys

use lru::LruCache;
use std::num::NonZeroUsize;
 
/// Composite cache key. Keys must implement Hash + Eq to be usable with
/// LruCache, hence the derives.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
struct CacheKey {
    region: String,
    resource_id: u32,
}

impl CacheKey {
    /// Convenience constructor that takes ownership of a copy of `region`.
    fn new(region: &str, id: u32) -> Self {
        Self {
            region: region.to_owned(),
            resource_id: id,
        }
    }
}
 
fn main() {
    let mut cache: LruCache<CacheKey, String> = LruCache::new(NonZeroUsize::new(10).unwrap());

    // Distinct (region, id) pairs are distinct keys.
    cache.put(CacheKey::new("us-east", 1), "data-1".to_string());
    cache.put(CacheKey::new("us-west", 1), "data-2".to_string());
    cache.put(CacheKey::new("us-east", 2), "data-3".to_string());

    // Lookup works with any key value that compares equal.
    let wanted = CacheKey::new("us-east", 1);
    if let Some(data) = cache.get(&wanted) {
        println!("Got data: {}", data);
    }

    println!("\nAll entries:");
    for (k, v) in cache.iter() {
        println!("  {:?} -> {}", k, v);
    }
}

Promote Entry

use lru::LruCache;
use std::num::NonZeroUsize;
 
fn main() {
    let mut cache: LruCache<&str, i32> = LruCache::new(NonZeroUsize::new(3).unwrap());

    cache.put("a", 1);
    cache.put("b", 2);
    cache.put("c", 3);

    // At this point "a" sits at the least-recently-used end.

    // promote() bumps an entry to most recent without reading its value.
    cache.promote(&"a");

    // "a" is now at the most-recently-used end.
    println!("After promoting 'a':");
    for (key, val) in cache.iter() {
        println!("  {} -> {}", key, val);
    }

    // Promoting a missing key is a harmless no-op.
    cache.promote(&"nonexistent");
}

Summary

  • LruCache::new(capacity) creates a cache with the given capacity (a NonZeroUsize)
  • cache.put(key, value) inserts or updates an entry
  • cache.get(&key) returns value and marks as recently used
  • cache.peek(&key) returns value without affecting order
  • cache.get_mut(&key) returns mutable reference and updates order
  • cache.peek_mut(&key) returns mutable reference without updating order
  • cache.pop(&key) removes and returns an entry
  • cache.pop_lru() removes and returns the least recently used entry
  • cache.contains(&key) checks existence without affecting order
  • cache.iter() iterates from most to least recently used
  • cache.iter().rev() iterates from least to most recently used
  • cache.resize(new_capacity) resizes cache (may evict entries)
  • cache.get_or_insert(key, init) gets or inserts default
  • cache.promote(&key) marks entry as recently used without returning value
  • Use Mutex or RwLock for thread-safe access
  • Perfect for: function result caching, HTTP caches, database query caches