What is the purpose of once_cell::unsync::Lazy for single-threaded lazy initialization?

once_cell::unsync::Lazy provides single-threaded lazy initialization: it stores a value that is computed once on first access and then cached for subsequent use—offering zero-cost initialization when the value is never accessed, an initialization point that is explicit and determined by first access, and seamless integration with Rust's type system without requiring macros. The unsync variant specifically is for single-threaded use and has no synchronization overhead (it is neither Send nor Sync), making it ideal for configuration loading, expensive computations that may not be needed, and struct fields that should be initialized lazily.

Understanding Lazy Initialization

use once_cell::unsync::Lazy;
 
// Lazy<T> stores a value that's computed once on first access
 
// The closure is not executed until the value is accessed
static CONFIG: Lazy<Config> = Lazy::new(|| {
    println!("Loading configuration...");
    Config::load_from_file("config.toml").unwrap()
});
 
struct Config {
    database_url: String,
    api_key: String,
}
 
impl Config {
    fn load_from_file(path: &str) -> Result<Self, std::io::Error> {
        // Simulated config loading
        Ok(Config {
            database_url: "postgres://localhost/db".to_string(),
            api_key: "secret-key".to_string(),
        })
    }
}
 
fn main() {
    println!("Program started");
    
    // First access triggers initialization
    println!("First access: {}", CONFIG.database_url);
    // Output:
    // Program started
    // Loading configuration...
    // First access: postgres://localhost/db
    
    // Second access uses cached value
    println!("Second access: {}", CONFIG.database_url);
    // Output: Second access: postgres://localhost/db
    // Note: "Loading configuration..." is NOT printed again
}

Lazy<T> defers initialization until first access and caches the result.

unsync vs sync Variants

use once_cell::unsync::Lazy as UnsyncLazy;
use once_cell::sync::Lazy as SyncLazy;

// unsync::Lazy - Single-threaded only, no synchronization overhead.
// It is neither Send nor Sync, so the compiler rejects any attempt to
// move it into or share it across threads; in exchange, access is a
// plain (non-atomic) state check.

fn unsync_example() {
    // This can only be used in single-threaded contexts
    let lazy_value: UnsyncLazy<i32> = UnsyncLazy::new(|| {
        println!("Computing...");
        42
    });
    
    // If you try to share this across threads, it won't compile:
    // std::thread::spawn(|| {
    //     println!("{}", *lazy_value);  // ERROR: UnsyncLazy is not Send/Sync
    // });
    
    // First dereference runs the closure; later ones return the cached value.
    println!("Value: {}", *lazy_value);
}

// sync::Lazy - Multi-threaded, has synchronization overhead.
// Can be shared between threads; uses atomic state to guarantee the
// initializer runs at most once even under concurrent first accesses.

fn sync_example() {
    // A `static` item must be Sync, which sync::Lazy satisfies.
    static SYNC_LAZY: SyncLazy<i32> = SyncLazy::new(|| {
        println!("Computing sync...");
        42
    });
    
    std::thread::spawn(|| {
        println!("Thread 1: {}", *SYNC_LAZY);
    });
    
    std::thread::spawn(|| {
        println!("Thread 2: {}", *SYNC_LAZY);
    });
    
    // sync::Lazy ensures only one thread initializes the value;
    // others block until initialization completes.
    // NOTE(review): the spawned threads are not joined, so this demo
    // may exit before they print — fine for illustration only.
}

unsync::Lazy has no synchronization overhead but cannot be shared across threads.

Basic Usage Patterns

use once_cell::unsync::Lazy;

fn basic_patterns() {
    // "Static" lazy value: unsync::Lazy is not Sync, so a plain `static`
    // item will not compile. thread_local! gives a per-thread static
    // (use once_cell::sync::Lazy for a process-wide global instead).
    thread_local! {
        static WEBSITE_URL: Lazy<String> = Lazy::new(|| {
            "https://example.com".to_string()
        });
    }

    // Lazy in a local variable
    let expensive_computation = Lazy::new(|| {
        println!("Performing expensive computation...");
        (1..=1000).sum::<i32>()
    });

    // First access initializes
    println!("Sum: {}", *expensive_computation);  // Prints "Performing expensive computation..."

    // Second access uses cached value
    println!("Sum: {}", *expensive_computation);  // Does NOT print "Performing..."

    // Check initialization status; Lazy::get never forces initialization
    if Lazy::get(&expensive_computation).is_some() {
        println!("Value has been initialized");
    }
}

Lazy works for both static variables and local variables.

Struct Field Lazy Initialization

use once_cell::unsync::Lazy;
 
// Lazy struct fields: each field stores its init closure at construction
// time and runs it only on the first dereference. unsync::Lazy is fine
// here as long as the Application stays on one thread (it is not Sync).
struct Application {
    // Configuration loaded on first access
    config: Lazy<Config>,
    
    // Database connection pool created on first access
    connection_pool: Lazy<ConnectionPool>,
    
    // Some expensive-to-compute field
    compiled_regex: Lazy<regex::Regex>,
}
 
struct Config {
    database_url: String,
}
 
struct ConnectionPool {
    connections: Vec<String>,
}
 
impl Application {
    /// Builds the Application. Nothing is actually initialized here —
    /// the three closures below are stored, not executed.
    fn new() -> Self {
        Application {
            // Initialization closures are defined but not executed
            config: Lazy::new(|| {
                println!("Loading config...");
                Config {
                    database_url: "postgres://localhost/myapp".to_string(),
                }
            }),
            
            connection_pool: Lazy::new(|| {
                println!("Creating connection pool...");
                ConnectionPool {
                    connections: vec!["conn1".to_string(), "conn2".to_string()],
                }
            }),
            
            compiled_regex: Lazy::new(|| {
                println!("Compiling regex...");
                regex::Regex::new(r"^\d{4}-\d{2}-\d{2}$").unwrap()
            }),
        }
    }
    
    /// Borrows the database URL, loading the config on the first call only.
    fn get_database_url(&self) -> &str {
        // Only loads config when first accessed
        &self.config.database_url
    }
    
    /// Checks `date` against a YYYY-MM-DD shaped pattern, compiling the
    /// regex on the first call only.
    fn validate_date(&self, date: &str) -> bool {
        // Only compiles regex when first accessed
        self.compiled_regex.is_match(date)
    }
}
 
fn struct_example() {
    let app = Application::new();
    println!("Application created");
    
    // Config not loaded yet
    
    println!("Database URL: {}", app.get_database_url());
    // Now config is loaded
    
    // Second call reuses the cached Config; the closure does not rerun.
    println!("Database URL: {}", app.get_database_url());
}

Lazy<T> fields in structs defer initialization until first access.

Zero-Cost When Unused

use once_cell::unsync::Lazy;

struct OptionalFeatures {
    // None of these run their initializer unless the field is accessed.
    experimental_cache: Lazy<Vec<u8>>,
    debug_visualizer: Lazy<String>,
    unused_feature: Lazy<i32>,
}

fn zero_cost_example() {
    // Non-capturing init functions coerce to the `fn() -> T` that
    // Lazy<T> stores by default.
    fn make_cache() -> Vec<u8> {
        println!("Initializing cache...");
        vec![0u8; 1024 * 1024]  // 1MB allocation
    }
    fn make_visualizer() -> String {
        println!("Initializing visualizer...");
        "debug output".to_string()
    }
    fn make_unused() -> i32 {
        println!("This should never print");
        42
    }

    let features = OptionalFeatures {
        experimental_cache: Lazy::new(make_cache),
        debug_visualizer: Lazy::new(make_visualizer),
        unused_feature: Lazy::new(make_unused),
    };

    // Touch exactly one feature.
    println!("Cache size: {}", features.experimental_cache.len());

    // unused_feature is NEVER initialized: its initializer never runs
    // and no memory is allocated for its value — this is the
    // "zero-cost when unused" property.
}

If a Lazy value is never accessed, its initialization closure never runs.

Comparison with lazy_static!

use once_cell::sync::Lazy;

// once_cell approach for a global. A `static` item must be Sync, so the
// sync::Lazy variant is required here (unsync::Lazy in a `static` does
// not compile).
static CONFIG: Lazy<Config> = Lazy::new(|| {
    Config::load().unwrap()
});

// lazy_static! macro approach (requires lazy_static crate)
// lazy_static! {
//     static ref CONFIG: Config = Config::load().unwrap();
// }

struct Config {
    value: i32,
}

impl Config {
    /// Simulated fallible config load.
    fn load() -> Result<Self, std::io::Error> {
        Ok(Config { value: 42 })
    }
}

fn comparison_example() {
    // Key differences:

    // 1. Syntax
    // Lazy: Uses standard Rust syntax, no macro
    // lazy_static!: Requires macro invocation

    // 2. Initialization
    // Both initialize on first access. Lazy makes the closure and its
    // evaluation point explicit in ordinary code, rather than hiding
    // them behind a macro-generated Deref impl.

    // 3. Type safety
    // Lazy: Works with any type naturally
    // lazy_static!: Requires static ref wrapper

    // 4. Debugging
    // Lazy: Closure can be stepped through in debugger
    // lazy_static!: Macro expansion can be harder to debug

    // 5. Single-threaded variant
    // once_cell additionally offers unsync::Lazy with zero
    // synchronization overhead (for locals, fields, thread_local!);
    // lazy_static! is always synchronized
}

Lazy uses standard Rust syntax without macros, and initialization happens at a well-defined, explicit point: the first access.

Deterministic Initialization Order

use once_cell::unsync::Lazy;
 
static FIRST: Lazy<i32> = Lazy::new(|| {
    println!("Initializing FIRST");
    1
});
 
static SECOND: Lazy<i32> = Lazy::new(|| {
    println!("Initializing SECOND");
    *FIRST + 1
});
 
static THIRD: Lazy<i32> = Lazy::new(|| {
    println!("Initializing THIRD");
    *SECOND + 1
});
 
fn deterministic_order() {
    println!("Program start");
    
    // Access THIRD first
    println!("Third: {}", *THIRD);
    
    // Output shows initialization order:
    // Program start
    // Initializing THIRD
    // Initializing SECOND
    // Initializing FIRST
    // Third: 3
    
    // Note: FIRST is initialized before SECOND
    // because SECOND depends on FIRST
    // The order is determined by access, not static definition
    
    // With lazy_static!, multiple statics might initialize
    // in unpredictable order, potentially causing issues
    // with dependencies between statics
}

Lazy initialization order is determined by access order, not definition order.

Lazy with Error Handling

use once_cell::sync::Lazy;
use std::io::{self, Read};
 
struct FileContent {
    content: String,
}
 
/// Reads `path` into memory; any I/O failure is propagated to the caller.
fn load_file(path: &str) -> Result<FileContent, io::Error> {
    let mut content = String::new();
    std::fs::File::open(path)?.read_to_string(&mut content)?;
    Ok(FileContent { content })
}
 
// Lazy<T> where T is a Result. A `static` must be Sync, so the sync
// variant is used. The Result is computed once and cached — including
// a cached Err, which every later access will observe.
static FILE_CONTENT: Lazy<Result<FileContent, io::Error>> = 
    Lazy::new(|| load_file("data.txt"));
 
fn error_handling_example() {
    // Lazy::get does not force initialization, so the None arm is
    // reachable if nothing has dereferenced FILE_CONTENT yet.
    match Lazy::get(&FILE_CONTENT) {
        Some(Ok(content)) => {
            println!("File loaded: {}", content.content);
        }
        Some(Err(e)) => {
            println!("Error loading file: {}", e);
        }
        None => {
            println!("File not yet loaded");
        }
    }
    
    // Alternative: handle the failure inside the closure and fall back,
    // so the cached value is always a plain String.
    static FALLIBLE_VALUE: Lazy<String> = Lazy::new(|| {
        match load_file("config.txt") {
            Ok(content) => content.content,
            Err(_) => "default value".to_string(),
        }
    });
    
    // This version always succeeds, with fallback
    println!("Value: {}", *FALLIBLE_VALUE);
}

Handle initialization failures by returning Result from the closure or providing fallbacks.

Checking Initialization Status

use once_cell::sync::Lazy;
 
// A `static` must be Sync, so the sync variant backs this global.
// (unsync::Lazy exposes the same Lazy::get API for locals and fields.)
static MAYBE_INITIALIZED: Lazy<String> = Lazy::new(|| {
    println!("Initializing...");
    "initialized".to_string()
});
 
/// Demonstrates observing — and then forcing — initialization.
fn check_status() {
    // Check if initialized without triggering initialization
    if Lazy::get(&MAYBE_INITIALIZED).is_some() {
        println!("Already initialized");
    } else {
        println!("Not yet initialized");
    }
    
    // Lazy::get returns Option<&T>
    match Lazy::get(&MAYBE_INITIALIZED) {
        Some(value) => println!("Value: {}", value),
        None => println!("Not initialized"),
    }
    
    // Force initialization by dereferencing
    let _ = &*MAYBE_INITIALIZED;
    
    // Now get returns Some
    if Lazy::get(&MAYBE_INITIALIZED).is_some() {
        println!("Now initialized");
    }
}

Lazy::get() returns Option<&T> without triggering initialization.

Lazy in Functions

use once_cell::unsync::Lazy;
 
// Lazy static inside function - initialized on first call
fn get_cached_computation() -> &'static i32 {
    static CACHED: Lazy<i32> = Lazy::new(|| {
        println!("Computing expensive value...");
        (1..=1000).sum()
    });
    
    &*CACHED
}
 
// Alternative: Lazy local variable (lives for function scope)
fn function_local_lazy() {
    let local_cache = Lazy::new(|| {
        println!("Creating local cache...");
        vec![1, 2, 3, 4, 5]
    });
    
    // First call initializes
    println!("Values: {:?}", &*local_cache);
    
    // Subsequent calls use cache
    println!("Values: {:?}", &*local_cache);
    
    // local_cache is dropped when function returns
}
 
fn function_example() {
    println!("First call:");
    let value1 = get_cached_computation();  // Initializes
    println!("Value: {}", value1);
    
    println!("\nSecond call:");
    let value2 = get_cached_computation();  // Uses cached
    println!("Value: {}", value2);
}

Lazy can be used as function-local statics or local variables.

Lazy with Closures Capturing Environment

use once_cell::unsync::Lazy;
 
// unsync::Lazy can capture local environment
fn closure_capturing() {
    let config_path = "config.toml".to_string();
    let debug_mode = true;
    
    // Lazy closure captures local variables
    let config = Lazy::new(|| {
        println!("Loading config from: {}", config_path);
        if debug_mode {
            println!("Debug mode enabled");
        }
        format!("Config from {}", config_path)
    });
    
    // Note: This only works because unsync::Lazy doesn't need to be Send
    // sync::Lazy cannot capture non-Send types
    
    println!("Config: {}", *config);
}
 
// For sync::Lazy, all captured values must be Send
fn sync_lazy_capture() {
    use once_cell::sync::Lazy as SyncLazy;
    
    let path = "config.toml".to_string();  // String is Send
    
    static SYNC_CONFIG: SyncLazy<String> = SyncLazy::new(|| {
        // Cannot capture non-Send types from environment
        format!("Config")
    });
}

unsync::Lazy can capture non-Send types from the environment; sync::Lazy cannot.

Lazy for Memoization

use once_cell::unsync::Lazy;
use std::collections::HashMap;
use std::cell::RefCell;
 
/// Memoizes a pure function of one `i32` argument.
///
/// Results are stored directly in the map. A per-entry `Lazy<T>` does
/// not work here: `Lazy<T>`'s default closure parameter is a plain
/// `fn() -> T`, which cannot capture `input` or `self.func`, and the
/// value would be computed at insertion time anyway.
struct MemoizedFunction<F, T> {
    func: F,
    cache: RefCell<HashMap<i32, T>>,
}
 
impl<F, T: Clone> MemoizedFunction<F, T>
where
    F: Fn(i32) -> T,
{
    fn new(func: F) -> Self {
        MemoizedFunction {
            func,
            cache: RefCell::new(HashMap::new()),
        }
    }
    
    /// Returns the cached result for `input`, computing it on first use.
    fn call(&self, input: i32) -> T {
        // entry() does a single lookup; the closure runs only on a miss.
        self.cache
            .borrow_mut()
            .entry(input)
            .or_insert_with(|| (self.func)(input))
            .clone()
    }
}
 
fn memoization_example() {
    // Table-based memoization: build the whole table lazily, exactly once.
    // A local is used because unsync::Lazy cannot back a `static`
    // (statics must be Sync).
    let fib_cache: Lazy<Vec<i64>> = Lazy::new(|| {
        println!("Building Fibonacci cache...");
        let mut cache = vec![0i64, 1i64];
        // Stop at index 92: fib(93) overflows i64.
        for i in 2..93 {
            cache.push(cache[i - 1] + cache[i - 2]);
        }
        cache
    });
    
    // First access builds cache
    println!("Fib(50): {}", fib_cache[50]);
    
    // Subsequent access uses cache
    println!("Fib(60): {}", fib_cache[60]);
}

Lazy is ideal for memoizing expensive computations that may or may not be needed.

Lazy vs OnceCell

use once_cell::unsync::{Lazy, OnceCell};
 
fn lazy_vs_oncecell() {
    // Lazy: Initialized by closure on first access
    let lazy_value: Lazy<String> = Lazy::new(|| {
        println!("Lazy initializing...");
        "computed value".to_string()
    });
    
    // Access triggers initialization
    println!("Lazy: {}", *lazy_value);
    
    // OnceCell: initialized by explicit set OR by get_or_init.
    // Two separate cells are used below, because once a cell holds a
    // value, get_or_init returns it and never runs its closure.
    
    // Method 1: Explicit set
    let explicit_cell: OnceCell<String> = OnceCell::new();
    explicit_cell.set("explicit value".to_string()).unwrap();
    
    // Method 2: get_or_init (like Lazy)
    let lazy_cell: OnceCell<String> = OnceCell::new();
    let value = lazy_cell.get_or_init(|| {
        println!("OnceCell initializing...");
        "computed value".to_string()
    });
    println!("OnceCell: {}", value);
    
    // Key differences:
    // - Lazy: Always has a closure, initializes automatically on access
    // - OnceCell: Can be initialized explicitly or lazily
    // - Lazy: Value is always initialized eventually (on first access)
    // - OnceCell: Value might never be initialized
    
    // Use Lazy when:
    // - You always want to initialize the value
    // - You have an initialization closure
    // - You want simpler syntax
    
    // Use OnceCell when:
    // - You might not need the value
    // - Initialization might be optional
    // - You need to set the value explicitly sometimes
}

Lazy always has an initialization closure; OnceCell can be set explicitly.

Thread Safety Considerations

use once_cell::unsync::Lazy;
 
// unsync::Lazy is NOT thread-safe: it is neither Send nor Sync,
// so it cannot be shared across (or moved between) threads.
 
// This won't compile — the `static` item itself is rejected, because a
// static must be Sync and unsync::Lazy is not:
// static GLOBAL_LAZY: Lazy<i32> = Lazy::new(|| 42);
//
// fn threaded_access() {
//     std::thread::spawn(|| {
//         println!("{}", *GLOBAL_LAZY);  // ERROR: Lazy<...> cannot be shared between threads
//     });
// }
 
// For thread-safe lazy values, use sync::Lazy:
use once_cell::sync::Lazy as SyncLazy;
 
static THREAD_SAFE: SyncLazy<i32> = SyncLazy::new(|| {
    println!("Thread-safe initialization");
    42
});
 
fn thread_safe_example() {
    // This works across threads
    std::thread::spawn(|| {
        println!("Thread 1: {}", *THREAD_SAFE);
    });
    
    std::thread::spawn(|| {
        println!("Thread 2: {}", *THREAD_SAFE);
    });
    
    // sync::Lazy uses atomic operations to ensure
    // only one thread initializes the value.
    // NOTE(review): threads are not joined here, so whether and when
    // they print before the demo exits is not guaranteed.
}
 
// Use unsync::Lazy when:
// - Single-threaded application
// - Thread-local storage (thread_local! statics)
// - Local variables and struct fields that never cross a thread boundary
Use sync::Lazy for multi-threaded contexts; unsync::Lazy for single-threaded performance.

Integration with Other Patterns

use once_cell::unsync::Lazy;
 
// A service whose components are constructed only when first used.
struct Service {
    // Lazy-loaded components
    database: Lazy<Database>,
    cache: Lazy<Cache>,
    logger: Lazy<Logger>,
}
 
struct Database;
struct Cache;
struct Logger;
 
impl Service {
    /// Stores the three init closures; none of them run here.
    fn new() -> Self {
        Service {
            database: Lazy::new(|| {
                println!("Connecting to database...");
                Database
            }),
            cache: Lazy::new(|| {
                println!("Initializing cache...");
                Cache
            }),
            logger: Lazy::new(|| {
                println!("Setting up logger...");
                Logger
            }),
        }
    }
    
    /// Borrows the database, running its init closure on the first call.
    fn get_data(&self) -> &Database {
        &*self.database  // Initializes on first call
    }
    
    /// Logs a message; the logger is set up on the first log call only.
    fn log(&self, message: &str) {
        // Forcing the Lazy here initializes the logger if needed.
        let _ = &*self.logger;
        println!("LOG: {}", message);
    }
}
 
// Builder pattern with Lazy
struct ConfigBuilder {
    config: Lazy<Config>,
}
 
impl ConfigBuilder {
    fn new() -> Self {
        ConfigBuilder {
            config: Lazy::new(|| Config::default()),
        }
    }
    
    /// Placeholder: a Lazy's closure is fixed at creation, so the value
    /// cannot be customized afterwards — OnceCell (set later) suits
    /// builder-style mutation better.
    fn with_value(mut self, value: i32) -> Self {
        // Cannot modify Lazy once created
        // This pattern would need OnceCell for mutability
        self
    }
    
    /// Demonstration stub only — always panics.
    fn build(self) -> &'static Config {
        // For demonstration; would need Box::leak for 'static
        // or return owned Config
        unimplemented!()
    }
}
 
struct Config {
    value: i32,
}
 
impl Config {
    // NOTE(review): `impl Default for Config` would be more idiomatic
    // than this inherent `default` method.
    fn default() -> Self {
        Config { value: 0 }
    }
}

Lazy integrates well with structs that have expensive-to-initialize components.

Performance Characteristics

use once_cell::unsync::Lazy;
 
fn performance_characteristics() {
    // unsync::Lazy has minimal overhead:
    
    // 1. Size: a slot for the value plus a slot for the not-yet-run
    //    closure (roughly Option<T> + Option<F>).
    //    NOTE(review): the exact layout is an implementation detail of
    //    the once_cell version in use — verify before relying on it.
    
    // 2. First access cost:
    //    - Check if initialized (branch)
    //    - Execute closure
    //    - Store result
    
    // 3. Subsequent access cost:
    //    - Check if initialized (branch)
    //    - Return a reference to the stored value
    
    // 4. No synchronization overhead:
    //    - No atomic operations
    //    - No locks
    
    // Comparison with sync::Lazy:
    // sync::Lazy has:
    // - Atomic flag for initialization state
    // - Possible blocking during initialization
    // - Memory barriers for visibility
    
    // unsync::Lazy has:
    // - Simple non-atomic state check
    // - No blocking
    // - No memory barriers
    
    // Best for:
    // - Single-threaded contexts
    // - Thread-local data
    // - When synchronization cost matters
}

unsync::Lazy has minimal overhead: just a branch check and pointer dereference after initialization.

Synthesis

Core purpose of unsync::Lazy:

// 1. Defer expensive initialization until needed
// 2. Initialize exactly once on first access
// 3. Zero cost when never accessed
// 4. Single-threaded optimization (no sync overhead)
 
use once_cell::unsync::Lazy;
 
// unsync::Lazy is not Sync, so it cannot back a plain `static`;
// thread_local! provides a per-thread lazy "global" instead
// (use once_cell::sync::Lazy / std::sync::LazyLock for a true global).
thread_local! {
    static EXPENSIVE: Lazy<Vec<i32>> = Lazy::new(|| {
        (0..1000000).filter(|&x| x % 7 == 0).collect()
    });
}
// If never accessed, the million-element filter never runs

Key characteristics:

// - Single-threaded: Cannot be shared across threads
// - Zero-sync overhead: No atomics, no locks
// - Deterministic: Initialized in access order
// - Macro-free: Standard Rust syntax
// - Composable: Can capture environment in closures

Use cases:

// 1. Configuration loading
// NOTE(review): a bare `static` must be Sync — use once_cell::sync::Lazy
// (or std::sync::LazyLock) for items like these; unsync::Lazy fits
// locals, struct fields, and thread_local! statics.
static CONFIG: Lazy<Config> = Lazy::new(|| Config::load().unwrap());
 
// 2. Expensive computation caching (also requires the sync variant
// when declared as a `static`)
static RESULT: Lazy<i32> = Lazy::new(|| expensive_computation());
 
// 3. Struct fields that may not be used
struct Service {
    optional_feature: Lazy<Feature>,  // Only created if accessed
}
 
// 4. Thread-local data — the idiomatic home for a "static" unsync::Lazy
thread_local! {
    static LOCAL_CACHE: Lazy<Cache> = Lazy::new(|| Cache::new());
}
 
// 5. Function-local static caches (a function-local `static` must be
// Sync too, so this needs the sync variant to compile)
fn get_data() -> &'static Data {
    static CACHE: Lazy<Data> = Lazy::new(|| Data::load());
    &*CACHE
}

When to use unsync vs sync:

// unsync::Lazy when:
// - Single-threaded application
// - Thread-local data
// - Function-local static that doesn't escape threads
// - Performance-critical code without thread contention
 
// sync::Lazy when:
// - Multi-threaded application
// - Global static accessed from multiple threads
// - Shared configuration or cache
 
// Note: std::sync::OnceLock (stable since Rust 1.70) is the standard-
// library equivalent of once_cell::sync::OnceCell, and std::sync::LazyLock
// (stable since Rust 1.80) of once_cell::sync::Lazy. The single-threaded
// counterparts are std::cell::OnceCell (1.70) and std::cell::LazyCell (1.80).

Key insight: once_cell::unsync::Lazy provides lazy initialization for single-threaded contexts with zero synchronization overhead—no atomic operations, no locks, just a simple initialization flag check and pointer dereference. It stores an initialization closure that runs exactly once on first access, caching the result for all subsequent accesses. Unlike lazy_static!, it uses standard Rust syntax without macros, provides deterministic initialization order (determined by access order rather than linker order), and integrates naturally with structs and local variables. The unsync variant specifically trades thread safety for performance, making it ideal for thread-local data, function-local statics, struct fields in single-threaded contexts, and any situation where the value will only ever be accessed from a single thread. If the value is never accessed, the initialization closure never runs—the "zero-cost when unused" property that distinguishes lazy initialization from eager initialization.