Loading page…
Rust walkthroughs
Loading page…
The lru crate provides a fast, thread-safe Least Recently Used (LRU) cache implementation. An LRU cache automatically evicts the least recently accessed items when it reaches capacity, making it ideal for caching where memory is limited. Common use cases include memoizing expensive computations, caching database queries, HTTP response caching, and rate limiting.
Key concepts:
get_mut for modifying cached values

# Cargo.toml
[dependencies]
lru = "0.12"

use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
    // Build an LRU cache that holds at most 3 entries.
    let mut cache = LruCache::<&str, i32>::new(NonZeroUsize::new(3).unwrap());

    // Populate the cache with three entries.
    for (key, value) in [("a", 1), ("b", 2), ("c", 3)] {
        cache.put(key, value);
    }

    // Reading each entry promotes it to most-recently-used; after these
    // three reads the least-to-most-recent order is a, b, c.
    println!("Cache contents: a={}, b={}, c={}",
        cache.get(&"a").unwrap(),
        cache.get(&"b").unwrap(),
        cache.get(&"c").unwrap()
    );

    // A fourth insert exceeds capacity and evicts the least recently
    // used entry, which is "a".
    cache.put("d", 4);
    println!("After adding 'd', 'a' is evicted: {:?}", cache.get(&"a"));
}
use lru::LruCache;
use std::num::NonZeroUsize;
// Tour of the core LruCache API: put, get, contains, peek, get_mut,
// pop_entry, len, cap, and clear.
fn main() {
    let capacity = NonZeroUsize::new(5).unwrap();
    let mut cache: LruCache<String, String> = LruCache::new(capacity);
    // Insert values
    cache.put("key1".to_string(), "value1".to_string());
    cache.put("key2".to_string(), "value2".to_string());
    cache.put("key3".to_string(), "value3".to_string());
    // Get values (`get` promotes the entry to most-recently-used)
    if let Some(value) = cache.get(&"key1".to_string()) {
        println!("Got key1: {}", value);
    }
    // Check if key exists (without promoting to most-recent)
    println!("Contains key2: {}", cache.contains(&"key2".to_string()));
    // Get without promoting (doesn't update LRU order)
    if let Some(value) = cache.peek(&"key3".to_string()) {
        println!("Peeked key3: {}", value);
    }
    // Get mutable reference (also promotes the entry, like `get`)
    if let Some(value) = cache.get_mut(&"key2".to_string()) {
        *value = "updated_value2".to_string();
    }
    // Remove a specific key.
    // Fixed: `LruCache::pop` returns Option<V>, not Option<(K, V)>, so the
    // original tuple destructuring did not compile. `pop_entry` returns
    // the (key, value) pair.
    if let Some((k, v)) = cache.pop_entry(&"key1".to_string()) {
        println!("Removed {}: {}", k, v);
    }
    // Check current size
    println!("Cache size: {}", cache.len());
    println!("Cache capacity: {}", cache.cap());
    // Clear all entries
    cache.clear();
    println!("After clear, size: {}", cache.len());
}
use lru::LruCache;
use std::num::NonZeroUsize;
// Step-by-step trace of LRU eviction with a capacity-3 cache: accessing an
// entry refreshes its recency, so which key gets evicted depends on the
// access pattern, not just insertion order.
fn main() {
let capacity = NonZeroUsize::new(3).unwrap();
let mut cache: LruCache<&str, i32> = LruCache::new(capacity);
println!("Cache capacity: 3");
println!();
// Add three items: [a:1, b:2, c:3]
// Most recently used order: c, b, a
cache.put("a", 1);
cache.put("b", 2);
cache.put("c", 3);
println!("Added a, b, c. LRU order (least to most recent): a, b, c");
// Access 'a' - it becomes most recently used
// Order is now: a, c, b
// (the return value is discarded; the access itself reorders the cache)
cache.get(&"a");
println!("Accessed 'a'. LRU order: b, c, a");
// Add 'd' - 'b' is evicted (least recently used)
cache.put("d", 4);
println!("Added 'd'. 'b' was evicted.");
// `contains` checks membership without changing LRU order.
println!(" Contains 'a': {}", cache.contains(&"a"));
println!(" Contains 'b': {}", cache.contains(&"b"));
println!(" Contains 'c': {}", cache.contains(&"c"));
println!(" Contains 'd': {}", cache.contains(&"d"));
// Add 'e' - 'c' is evicted
cache.put("e", 5);
println!("\nAdded 'e'. 'c' was evicted.");
println!(" Contains 'a': {}", cache.contains(&"a"));
println!(" Contains 'c': {}", cache.contains(&"c"));
println!(" Contains 'd': {}", cache.contains(&"d"));
println!(" Contains 'e': {}", cache.contains(&"e"));
}use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
let capacity = NonZeroUsize::new(10).unwrap();
let mut cache: LruCache<String, String> = LruCache::new(capacity);
// get_or_insert: insert if missing, always promote to most recent
let value = cache.get_or_insert("key1".to_string(), || "computed_value".to_string());
println!("First get_or_insert: {}", value);
let value = cache.get_or_insert("key1".to_string(), || {
panic!("This should not be called!");
});
println!("Second get_or_insert (cached): {}", value);
// get_or_insert_mut: mutable access
{
let value = cache.get_or_insert_mut("key2".to_string(), || "initial".to_string());
*value = "modified".to_string();
}
let value = cache.get(&"key2".to_string()).unwrap();
println!("Modified value: {}", value);
// Using get_or_insert with expensive computation
fn expensive_computation(n: i32) -> String {
println!(" Computing for {}...", n);
std::thread::sleep(std::time::Duration::from_millis(100));
format!("result_{}", n * n)
}
println!("\nFirst call (computes):";
let result = cache.get_or_insert("expensive_5".to_string(), || expensive_computation(5));
println!("Result: {}", result);
println!("\nSecond call (cached):";
let result = cache.get_or_insert("expensive_5".to_string(), || expensive_computation(5));
println!("Result: {}", result);
}use lru::LruCache;
use std::num::NonZeroUsize;
fn main() {
let capacity = NonZeroUsize::new(5).unwrap();
let mut cache: LruCache<String, i32> = LruCache::new(capacity);
cache.put("one".to_string(), 1);
cache.put("two".to_string(), 2);
cache.put("three".to_string(), 3);
cache.put("four".to_string(), 4);
cache.put("five".to_string(), 5);
// Iterate (least recently used to most recently used)
println!("Iterating (LRU to MRU):";
for (key, value) in cache.iter() {
println!(" {}: {}", key, value);
}
// Iterate in reverse (most recently used to least)
println!("\nIterating in reverse (MRU to LRU):";
for (key, value) in cache.iter().rev() {
println!(" {}: {}", key, value);
}
// Mutable iteration
println!("\nDoubling values:";
for (key, value) in cache.iter_mut() {
*value *= 2;
println!(" {}: {}", key, value);
}
// Iterate over keys only
println!("\nKeys only:";
for key in cache.keys() {
println!(" {}", key);
}
// Iterate over values only
println!("\nValues only:";
for value in cache.values() {
println!(" {}", value);
}
}use lru::LruCache;
use std::num::NonZeroUsize;
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};
/// Naive doubly-recursive Fibonacci — exponential time, used as the
/// slow baseline for the memoized version below.
fn fibonacci(n: u64) -> u64 {
    match n {
        0 | 1 => n,
        _ => fibonacci(n - 1) + fibonacci(n - 2),
    }
}
/// Fibonacci with an LRU memo table: each value is computed at most once
/// while it stays cached, turning the exponential recursion linear.
fn fibonacci_memoized(n: u64, cache: &mut LruCache<u64, u64>) -> u64 {
    // Cache hit: return the memoized value (this also promotes it).
    if let Some(&hit) = cache.get(&n) {
        return hit;
    }
    // Cache miss: compute recursively, then memoize before returning.
    let computed = match n {
        0 | 1 => n,
        _ => fibonacci_memoized(n - 1, cache) + fibonacci_memoized(n - 2, cache),
    };
    cache.put(n, computed);
    computed
}
fn main() {
let capacity = NonZeroUsize::new(100).unwrap();
let mut cache: LruCache<u64, u64> = LruCache::new(capacity);
// Without memoization (slow for large n)
println!("Computing fib(35) without memoization...";
let start = std::time::Instant::now();
let result = fibonacci(35);
println!(" Result: {}", result);
println!(" Time: {:?}", start.elapsed());
// With memoization (fast)
println!("\nComputing fib(35) with memoization...";
let start = std::time::Instant::now();
let result = fibonacci_memoized(35, &mut cache);
println!(" Result: {}", result);
println!(" Time: {:?}", start.elapsed());
// Even larger values with memoization
println!("\nComputing fib(100) with memoization...";
let start = std::time::Instant::now();
let result = fibonacci_memoized(100, &mut cache);
println!(" Result: {}", result);
println!(" Time: {:?}", start.elapsed());
}use lru::LruCache;
use std::num::NonZeroUsize;
use std::time::{Duration, Instant};
/// One cached HTTP response plus the bookkeeping needed for TTL expiry.
#[derive(Clone)]
struct CachedResponse {
    body: String,
    status: u16,
    cached_at: Instant,
    ttl: Duration,
}
impl CachedResponse {
    /// Wraps a response captured right now, valid for `ttl`.
    fn new(body: String, status: u16, ttl: Duration) -> Self {
        let cached_at = Instant::now();
        CachedResponse { body, status, cached_at, ttl }
    }
    /// True once more than `ttl` has elapsed since the entry was stored.
    fn is_expired(&self) -> bool {
        self.cached_at.elapsed() > self.ttl
    }
}
// URL-keyed response cache combining LRU capacity eviction with per-entry
// TTL expiry; expired entries are dropped lazily when read.
struct HttpCache {
cache: LruCache<String, CachedResponse>,
}
impl HttpCache {
// `capacity` must be non-zero, or the unwrap on NonZeroUsize::new panics.
fn new(capacity: usize) -> Self {
Self {
cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
}
}
// Returns the cached body for `url` if present and not expired.
// Takes `&mut self` because `get` promotes the entry in LRU order.
fn get(&mut self, url: &str) -> Option<String> {
if let Some(cached) = self.cache.get(url) {
if !cached.is_expired() {
return Some(cached.body.clone());
}
// Remove expired entry
self.cache.pop(url);
}
None
}
// Stores `body` for `url`; the status code is hard-coded to 200 here.
fn set(&mut self, url: String, body: String, ttl: Duration) {
let response = CachedResponse::new(body, 200, ttl);
self.cache.put(url, response);
}
// Drops any cached entry for `url` regardless of its TTL.
fn invalidate(&mut self, url: &str) {
self.cache.pop(url);
}
}
/// Simulated HTTP client: logs the request and fabricates a response body.
fn fetch_url(url: &str) -> String {
    println!("Fetching {} from server...", url);
    ["Response from ", url].concat()
}
// Exercises HttpCache: miss -> fetch + store, then hit, then invalidate,
// then miss again.
fn main() {
let mut cache = HttpCache::new(100);
// First request - cache miss
let url = "https://api.example.com/users";
match cache.get(url) {
Some(body) => println!("Cache hit: {}", body),
None => {
let response = fetch_url(url);
cache.set(url.to_string(), response.clone(), Duration::from_secs(60));
println!("Cached response");
}
}
// Second request - cache hit
match cache.get(url) {
Some(body) => println!("Cache hit: {}", body),
None => {
let response = fetch_url(url);
cache.set(url.to_string(), response, Duration::from_secs(60));
}
}
// Invalidate
cache.invalidate(url);
println!("\nInvalidated cache for {}", url);
// Request after invalidation - cache miss
match cache.get(url) {
Some(body) => println!("Cache hit: {}", body),
None => {
let response = fetch_url(url);
cache.set(url.to_string(), response, Duration::from_secs(60));
println!("Cached response");
}
}
}use lru::LruCache;
use std::num::NonZeroUsize;
use std::hash::{Hash, Hasher, DefaultHasher};
// Composite cache key: a query is identified by table + conditions text.
// Hash + Eq derives make it usable as an LruCache key.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
struct QueryKey {
table: String,
conditions: String,
}
// Result rows plus how long the (simulated) query took.
struct QueryResult {
rows: Vec<String>,
execution_time_ms: u64,
}
// LRU cache of query results with hit/miss counters for stats().
struct DatabaseCache {
cache: LruCache<QueryKey, QueryResult>,
hits: u64,
misses: u64,
}
impl DatabaseCache {
    /// Creates a cache that retains up to `capacity` query results.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            hits: 0,
            misses: 0,
        }
    }
    /// Returns the (possibly cached) result for `table` + `conditions`,
    /// updating the hit/miss counters. On a miss the query is executed
    /// and its result stored before being returned.
    fn query(&mut self, table: &str, conditions: &str) -> &QueryResult {
        let key = QueryKey {
            table: table.to_string(),
            conditions: conditions.to_string(),
        };
        if self.cache.contains(&key) {
            self.hits += 1;
        } else {
            self.misses += 1;
            let result = self.execute_query(table, conditions);
            // Fixed: the original `put(key, result)` moved `key` and then
            // used it again in the `get` below — a use-after-move compile
            // error. Clone the key for the insert so the lookup can reuse it.
            self.cache.put(key.clone(), result);
        }
        // The entry is guaranteed present: it was either already cached or
        // inserted just above. `get` also promotes it to most-recently-used.
        self.cache.get(&key).unwrap()
    }
    /// Simulated database query: logs the SQL, sleeps 50 ms, and
    /// fabricates three rows.
    fn execute_query(&self, table: &str, conditions: &str) -> QueryResult {
        println!("Executing: SELECT * FROM {} WHERE {}", table, conditions);
        std::thread::sleep(std::time::Duration::from_millis(50));
        QueryResult {
            rows: vec![
                format!("row1 from {}", table),
                format!("row2 from {}", table),
                format!("row3 from {}", table),
            ],
            execution_time_ms: 50,
        }
    }
    /// Returns (hits, misses, hit_rate); hit_rate is 0.0 before any query.
    fn stats(&self) -> (u64, u64, f64) {
        let total = self.hits + self.misses;
        let hit_rate = if total > 0 {
            self.hits as f64 / total as f64
        } else {
            0.0
        };
        (self.hits, self.misses, hit_rate)
    }
}
// Exercises DatabaseCache: identical queries hit the cache, while new
// conditions or tables miss; finishes by printing hit/miss statistics.
fn main() {
let mut db_cache = DatabaseCache::new(50);
// First query - cache miss
let result = db_cache.query("users", "age > 18");
println!("Rows: {:?}\n", result.rows);
// Same query - cache hit
let result = db_cache.query("users", "age > 18");
println!("Rows: {:?}\n", result.rows);
// Different conditions - cache miss
let result = db_cache.query("users", "name = 'Alice'");
println!("Rows: {:?}\n", result.rows);
// Same as first query - cache hit
let result = db_cache.query("users", "age > 18");
println!("Rows: {:?}\n", result.rows);
// Different table - cache miss
let result = db_cache.query("orders", "status = 'pending'");
println!("Rows: {:?}\n", result.rows);
let (hits, misses, hit_rate) = db_cache.stats();
println!("Cache stats: {} hits, {} misses, {:.1}% hit rate",
hits, misses, hit_rate * 100.0);
}use lru::LruCache;
use std::num::NonZeroUsize;
use std::sync::{Arc, Mutex};
use std::thread;
/// An LruCache behind a Mutex so it can be shared across threads via Arc.
/// Values are cloned out of the cache, so no reference escapes the lock.
struct ThreadSafeCache<K, V> {
    cache: Mutex<LruCache<K, V>>,
}
impl<K, V> ThreadSafeCache<K, V>
where
    K: std::hash::Hash + Eq + Clone,
    V: Clone,
{
    fn new(capacity: usize) -> Self {
        Self {
            cache: Mutex::new(LruCache::new(NonZeroUsize::new(capacity).unwrap())),
        }
    }
    /// Clones the value out under the lock; `get` promotes the entry.
    fn get(&self, key: &K) -> Option<V> {
        let mut cache = self.cache.lock().unwrap();
        cache.get(key).cloned()
    }
    fn put(&self, key: K, value: V) {
        let mut cache = self.cache.lock().unwrap();
        cache.put(key, value);
    }
    /// Returns the cached value for `key`, running `f` to produce it on a
    /// miss. NOTE: the lock is held while `f` runs — a slow `f` blocks
    /// every other cache user for that duration.
    fn get_or_insert<F>(&self, key: K, f: F) -> V
    where
        F: FnOnce() -> V,
    {
        let mut cache = self.cache.lock().unwrap();
        // Fixed: the original passed `key.clone()` here, but `key` is owned
        // and not used afterwards, so the clone was pure waste.
        cache.get_or_insert(key, f).clone()
    }
}
// Spawns 5 threads that share one ThreadSafeCache through Arc.
// Each thread cycles through its own 5 keys twice (j % 5), so the second
// pass is served from the cache without re-running the closure.
fn main() {
let cache = Arc::new(ThreadSafeCache::new(100));
let mut handles = vec![];
for i in 0..5 {
let cache_clone = Arc::clone(&cache);
let handle = thread::spawn(move || {
for j in 0..10 {
let key = format!("key-{}-{}", i, j % 5);
let value = cache_clone.get_or_insert(key.clone(), || {
println!("Thread {} computing {}", i, key);
std::thread::sleep(std::time::Duration::from_millis(10));
format!("value-{}", key)
});
println!("Thread {} got: {}", i, value);
}
});
handles.push(handle);
}
// Wait for all worker threads to finish before exiting.
for handle in handles {
handle.join().unwrap();
}
}use lru::LruCache;
use std::num::NonZeroUsize;
// Demonstrates resize(): growing keeps everything, shrinking evicts the
// least-recently-used entries down to the new capacity.
fn main() {
    let capacity = NonZeroUsize::new(5).unwrap();
    // Fixed: the original declared LruCache<i32, &str> and then stored
    // `&format!(...)` — a reference to a temporary String dropped at the
    // end of the statement, which does not compile. Store owned Strings.
    let mut cache: LruCache<i32, String> = LruCache::new(capacity);
    // Fill the cache
    for i in 1..=5 {
        cache.put(i, format!("value{}", i));
    }
    println!("Initial state: {} items", cache.len());
    // Resize to larger capacity
    let new_capacity = NonZeroUsize::new(10).unwrap();
    cache.resize(new_capacity);
    println!("After resize to 10: capacity = {}, len = {}", cache.cap(), cache.len());
    // Add more items
    for i in 6..=10 {
        cache.put(i, format!("value{}", i));
    }
    println!("After adding more: {} items", cache.len());
    // Resize to smaller capacity (evicts LRU items)
    let smaller_capacity = NonZeroUsize::new(3).unwrap();
    cache.resize(smaller_capacity);
    println!("After resize to 3: capacity = {}, len = {}", cache.cap(), cache.len());
    // Check what remains (most recently used items)
    println!("Remaining items:");
    for (k, v) in cache.iter() {
        println!(" {}: {}", k, v);
    }
}
use lru::LruCache;
use std::num::NonZeroUsize;
// Cache key scoped by tenant: the same user_id under different tenants
// maps to distinct entries.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
struct UserKey {
tenant_id: u32,
user_id: u64,
}
impl UserKey {
fn new(tenant_id: u32, user_id: u64) -> Self {
Self { tenant_id, user_id }
}
}
// Cached user record.
#[derive(Debug, Clone)]
struct User {
name: String,
email: String,
role: String,
}
// LRU cache of users keyed by (tenant_id, user_id).
struct UserCache {
cache: LruCache<UserKey, User>,
}
impl UserCache {
    /// Creates a cache holding up to `capacity` users across all tenants.
    fn new(capacity: usize) -> Self {
        Self {
            cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
        }
    }
    /// Looks up a user, promoting the entry to most-recently-used.
    fn get_user(&mut self, tenant_id: u32, user_id: u64) -> Option<&User> {
        let key = UserKey::new(tenant_id, user_id);
        self.cache.get(&key)
    }
    fn put_user(&mut self, tenant_id: u32, user_id: u64, user: User) {
        let key = UserKey::new(tenant_id, user_id);
        self.cache.put(key, user);
    }
    /// Removes and returns the cached user, if any.
    /// Fixed: `LruCache::pop` already returns Option<V>; the original
    /// `.map(|(_, v)| v)` assumed Option<(K, V)> and would not compile.
    fn remove_user(&mut self, tenant_id: u32, user_id: u64) -> Option<User> {
        let key = UserKey::new(tenant_id, user_id);
        self.cache.pop(&key)
    }
}
// Exercises UserCache: the same user_id under two tenants resolves to two
// distinct cache entries, and an unknown tenant misses.
fn main() {
let mut cache = UserCache::new(100);
cache.put_user(1, 100, User {
name: "Alice".to_string(),
email: "alice@tenant1.com".to_string(),
role: "admin".to_string(),
});
cache.put_user(1, 101, User {
name: "Bob".to_string(),
email: "bob@tenant1.com".to_string(),
role: "user".to_string(),
});
cache.put_user(2, 100, User {
name: "Charlie".to_string(),
email: "charlie@tenant2.com".to_string(),
role: "admin".to_string(),
});
// Get user from tenant 1
if let Some(user) = cache.get_user(1, 100) {
println!("Found: {:?}", user);
}
// Same user_id but different tenant
if let Some(user) = cache.get_user(2, 100) {
println!("Found: {:?}", user);
}
// Non-existent user
if cache.get_user(3, 100).is_none() {
println!("User not found");
}
}use lru::LruCache;
use std::num::NonZeroUsize;
use std::time::{Duration, Instant};
// Per-client counter for fixed-window rate limiting.
#[derive(Clone)]
struct RateLimitEntry {
count: u32,
window_start: Instant,
}
// Rate limiter backed by an LRU map so the number of tracked clients is
// bounded; least-recently-seen clients are evicted first when full.
struct RateLimiter {
requests: LruCache<String, RateLimitEntry>,
max_requests: u32,
window_duration: Duration,
}
impl RateLimiter {
    /// `capacity`: max number of distinct clients tracked at once (LRU-evicted
    /// beyond that); each client gets `max_requests` per `window_duration`
    /// using fixed-window counting.
    fn new(capacity: usize, max_requests: u32, window_duration: Duration) -> Self {
        Self {
            requests: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
            max_requests,
            window_duration,
        }
    }
    /// Records a request for `client_id` and returns whether it is allowed.
    fn check(&mut self, client_id: &str) -> bool {
        let now = Instant::now();
        if let Some(entry) = self.requests.get_mut(client_id) {
            // Window expired: start a fresh window counting this request.
            if now.duration_since(entry.window_start) > self.window_duration {
                entry.count = 1;
                entry.window_start = now;
                return true;
            }
            // Still inside the window: allow while under the limit.
            if entry.count < self.max_requests {
                entry.count += 1;
                return true;
            }
            // Over limit
            false
        } else {
            // First request seen from this client: open a new window.
            self.requests.put(client_id.to_string(), RateLimitEntry {
                count: 1,
                window_start: now,
            });
            true
        }
    }
    /// Returns how many requests `client_id` may still make in the current
    /// window. Fixed: uses `peek` instead of `get` so this read-only query
    /// does not promote the entry in LRU order — with `get`, idle clients
    /// that merely polled `remaining` could crowd out actively limited ones.
    fn remaining(&mut self, client_id: &str) -> u32 {
        let now = Instant::now();
        if let Some(entry) = self.requests.peek(client_id) {
            if now.duration_since(entry.window_start) > self.window_duration {
                return self.max_requests;
            }
            return self.max_requests.saturating_sub(entry.count);
        }
        self.max_requests
    }
}
// Drives the rate limiter past its 5-requests-per-second budget (requests
// 6 and 7 should be denied), then waits out the window so the counter resets.
fn main() {
// 5 requests per second per client
let mut limiter = RateLimiter::new(1000, 5, Duration::from_secs(1));
let client = "client-123";
// Make requests
for i in 1..=7 {
let allowed = limiter.check(client);
let remaining = limiter.remaining(client);
println!("Request {}: allowed={}, remaining={}", i, allowed, remaining);
}
println!("\nWaiting for window to reset...");
// Sleep slightly longer than the 1 s window so duration_since > window.
std::thread::sleep(Duration::from_millis(1100));
// Try again after window reset
let allowed = limiter.check(client);
println!("\nAfter reset: allowed={}", allowed);
}use lru::LruCache;
use std::num::NonZeroUsize;
use std::collections::HashMap;
// Versioned key/value configuration blob for one service.
#[derive(Debug, Clone)]
struct Config {
values: HashMap<String, String>,
version: u32,
}
// Per-service config cache with LRU eviction.
struct ConfigCache {
cache: LruCache<String, Config>,
// NOTE(review): set to 300 in `new` but never read anywhere in this file —
// either wire it into expiry logic or remove it.
default_ttl_seconds: u64,
}
impl ConfigCache {
fn new(capacity: usize) -> Self {
Self {
cache: LruCache::new(NonZeroUsize::new(capacity).unwrap()),
default_ttl_seconds: 300,
}
}
// Clones the whole Config out; `get` promotes the entry in LRU order.
fn get(&mut self, service: &str) -> Option<Config> {
self.cache.get(service).cloned()
}
fn set(&mut self, service: String, config: Config) {
self.cache.put(service, config);
}
fn invalidate(&mut self, service: &str) {
self.cache.pop(service);
}
// Mutates a cached config in place and bumps its version.
// Returns false when the service is not currently cached.
fn update_value(&mut self, service: &str, key: &str, value: &str) -> bool {
if let Some(config) = self.cache.get_mut(service) {
config.values.insert(key.to_string(), value.to_string());
config.version += 1;
return true;
}
false
}
}
// Exercises ConfigCache: store a config, read it back, update a value in
// place (version bump), then invalidate the entry.
fn main() {
let mut cache = ConfigCache::new(50);
// Load config for a service
let mut config1 = Config {
values: HashMap::new(),
version: 1,
};
config1.values.insert("max_connections".to_string(), "100".to_string());
config1.values.insert("timeout".to_string(), "30".to_string());
cache.set("api-gateway".to_string(), config1);
// Get config
if let Some(config) = cache.get("api-gateway") {
println!("Config v{}: {:?}", config.version, config.values);
}
// Update a value (bumps version from 1 to 2)
cache.update_value("api-gateway", "max_connections", "200");
// Get updated config
if let Some(config) = cache.get("api-gateway") {
println!("Updated config v{}: {:?}", config.version, config.values);
}
// Invalidate
cache.invalidate("api-gateway");
if cache.get("api-gateway").is_none() {
println!("Config invalidated");
}
}

Key API summary:
- LruCache::new(capacity) creates a cache with a fixed maximum size (requires NonZeroUsize)
- put(key, value) inserts or updates an entry, evicting the LRU item if at capacity
- get(&key) retrieves a value and promotes it to most-recently-used
- peek(&key) retrieves without updating LRU order
- get_mut(&key) returns a mutable reference and promotes the entry
- get_or_insert(key, init) returns the existing value or inserts a new one from the closure
- pop(&key) removes and returns a specific entry
- iter() iterates from least to most recently used; .rev() reverses the order
- contains(&key) checks existence without updating LRU order
- resize(new_capacity) changes capacity, evicting items if shrinking
- Wrap the cache in Arc&lt;Mutex&lt;_&gt;&gt; for thread-safe access