What is the difference between quote::ToTokens::to_tokens and into_token_stream for converting types to tokens?
quote::ToTokens::to_tokens appends tokens to an existing TokenStream, enabling efficient chaining and composition, while into_token_stream consumes the value and returns a new TokenStream — making to_tokens the building block for token manipulation and into_token_stream the convenience method for one-shot conversion. The key distinction is that to_tokens borrows &self and modifies a TokenStream passed by reference, whereas into_token_stream takes self by value and produces an owned TokenStream.
The ToTokens Trait
use quote::{ToTokens, TokenStreamExt};
// The ToTokens trait definition (simplified):
// pub trait ToTokens {
// fn to_tokens(&self, tokens: &mut TokenStream);
//
// fn into_token_stream(self) -> TokenStream
// where
// Self: Sized,
// {
// let mut tokens = TokenStream::new();
// self.to_tokens(&mut tokens);
// tokens
// }
// }
// Lists (in comments only) the categories of types that ship with a
// ToTokens implementation; the function body performs no work at runtime.
fn demonstrate_trait() {
// ToTokens is implemented for many types:
// - Primitive types (bool, char, integers, floats)
// - String types (String, &str)
// - Rust syntax types (Ident, Path, Type, etc.)
// - TokenStream itself
}ToTokens is the core trait for converting values to token streams.
Basic to_tokens Usage
// Fix: `TokenStream` is not exported by the quote crate — it lives in
// proc_macro2. The original `use quote::{..., TokenStream}` does not compile.
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::Ident;
/// Demonstrates the appending conversion: `to_tokens` borrows the value
/// and pushes its tokens onto an existing stream.
fn basic_to_tokens() {
    let mut tokens = TokenStream::new();
    let ident = Ident::new("my_var", proc_macro2::Span::call_site());
    // to_tokens appends to the existing TokenStream; `ident` stays usable.
    ident.to_tokens(&mut tokens);
    // tokens now contains: my_var
    println!("{}", tokens);
}
to_tokens appends tokens to a mutable TokenStream reference.
Basic into_token_stream Usage
use quote::{quote, ToTokens};
use syn::Ident;
// Demonstrates the consuming conversion: into_token_stream takes `self`
// by value and returns an owned TokenStream.
fn basic_into_token_stream() {
let ident = Ident::new("my_var", proc_macro2::Span::call_site());
// into_token_stream consumes the ident and returns TokenStream
let tokens = ident.into_token_stream();
// tokens now contains: my_var
println!("{}", tokens);
// ident is no longer usable - it was consumed
// println!("{:?}", ident); // Error (E0382): borrow of moved value
}into_token_stream consumes the value and returns an owned TokenStream.
Method Signatures Compared
// Fix: TokenStream comes from proc_macro2; quote does not re-export it.
use proc_macro2::TokenStream;
use quote::ToTokens;
/// Side-by-side signature comparison (commentary only; no runtime behavior).
fn signatures() {
    // to_tokens:
    // fn to_tokens(&self, tokens: &mut TokenStream)
    // - Borrows self (&self)
    // - Takes mutable reference to TokenStream
    // - Appends to existing tokens
    // - Returns nothing
    //
    // into_token_stream:
    // fn into_token_stream(self) -> TokenStream
    // - Takes ownership of self
    // - Creates new TokenStream internally
    // - Returns owned TokenStream
    // - Self is consumed
    //
    // Default implementation of into_token_stream:
    // fn into_token_stream(self) -> TokenStream {
    //     let mut tokens = TokenStream::new();
    //     self.to_tokens(&mut tokens);
    //     tokens
    // }
}
to_tokens borrows and appends; into_token_stream consumes and returns.
Chaining Multiple Values
// Fix: TokenStream must come from proc_macro2, not quote.
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
use syn::Ident;
/// Builds `first + second` by chaining to_tokens calls on one stream.
fn chaining_values() {
    let mut tokens = TokenStream::new();
    let ident1 = Ident::new("first", proc_macro2::Span::call_site());
    let ident2 = Ident::new("second", proc_macro2::Span::call_site());
    // to_tokens enables efficient chaining (each call only borrows).
    ident1.to_tokens(&mut tokens);
    tokens.extend(quote!(+));
    ident2.to_tokens(&mut tokens);
    // Result: first + second
    println!("{}", tokens);
    // Alternative using extend. The original passed `[ident1, quote!(+), ident2]`,
    // which does not type-check: an array cannot mix `Ident` and `TokenStream`
    // elements. Convert each value to a TokenStream first.
    let mut tokens2 = TokenStream::new();
    tokens2.extend([ident1.into_token_stream(), quote!(+), ident2.into_token_stream()]);
}
to_tokens enables efficient composition of multiple values.
TokenStreamExt and extend
use quote::{quote, TokenStream, TokenStreamExt};
fn token_stream_extend() {
// extend uses to_tokens internally
let mut tokens = TokenStream::new();
let ident = syn::Ident::new("value", proc_macro2::Span::call_site());
// extend calls to_tokens on each item
tokens.extend(quote!(let #ident = 42;));
// extend_one for single items
tokens.extend_one(quote!(println!("{}", #ident));
println!("{}", tokens);
}extend uses to_tokens to append multiple items.
When to Use to_tokens
use quote::{quote, ToTokens, TokenStream, TokenStreamExt};
use syn::{Ident, Type, Expr};
fn when_to_use_to_tokens() {
// Use case 1: Building tokens incrementally
let mut tokens = TokenStream::new();
for i in 0..3 {
let ident = Ident::new(&format!("var_{}", i), proc_macro2::Span::call_site());
ident.to_tokens(&mut tokens);
tokens.extend(quote!(, ));
}
// Result: var_0, var_1, var_2,
// Use case 2: Appending to existing token stream
let mut output = quote!(fn main() { );
let body = quote!(println!("Hello"); );
body.to_tokens(&mut output);
output.extend(quote!(}));
// Use case 3: Custom ToTokens implementation
struct MyStruct {
name: Ident,
value: Expr,
}
impl ToTokens for MyStruct {
fn to_tokens(&self, tokens: &mut TokenStream) {
self.name.to_tokens(tokens);
tokens.extend(quote!(: ));
self.value.to_tokens(tokens);
}
}
// Use case 4: Conditional token generation
fn generate_if_some<T: ToTokens>(tokens: &mut TokenStream, opt: Option<T>) {
if let Some(val) = opt {
val.to_tokens(tokens);
}
}
}Use to_tokens for incremental building and custom implementations.
When to Use into_token_stream
use quote::{quote, ToTokens};
use syn::Ident;
// Situations where the consuming, one-shot conversion reads best.
fn when_to_use_into_token_stream() {
// Use case 1: One-shot conversion
let ident = Ident::new("value", proc_macro2::Span::call_site());
let tokens = ident.into_token_stream();
// Use case 2: When you need owned TokenStream
// (generic over any ToTokens implementor; consumes `val`)
fn get_tokens<T: ToTokens>(val: T) -> proc_macro2::TokenStream {
val.into_token_stream()
}
// Use case 3: Returning from functions
fn generate_ident(name: &str) -> proc_macro2::TokenStream {
let ident = Ident::new(name, proc_macro2::Span::call_site());
ident.into_token_stream()
}
// Use case 4: Quick conversion for quote! macro
// (quote! itself already returns an owned TokenStream)
fn make_tokens() -> proc_macro2::TokenStream {
let ident = Ident::new("x", proc_macro2::Span::call_site());
let val = 42usize;
quote! {
let #ident = #val;
}
}
}Use into_token_stream for convenient one-shot conversion.
Implementing ToTokens for Custom Types
use quote::{quote, ToTokens, TokenStream};
use syn::{Ident, Type, Expr};
struct Field {
name: Ident,
ty: Type,
default: Option<Expr>,
}
impl ToTokens for Field {
fn to_tokens(&self, tokens: &mut TokenStream) {
// Append name
self.name.to_tokens(tokens);
// Append colon
tokens.extend(quote!(: ));
// Append type
self.ty.to_tokens(tokens);
// Optionally append default
if let Some(ref default) = self.default {
tokens.extend(quote!( = ));
default.to_tokens(tokens);
}
}
}
struct Struct {
name: Ident,
fields: Vec<Field>,
}
impl ToTokens for Struct {
fn to_tokens(&self, tokens: &mut TokenStream) {
tokens.extend(quote!(struct #name {));
for (i, field) in self.fields.iter().enumerate() {
if i > 0 {
tokens.extend(quote!(, ));
}
field.to_tokens(tokens);
}
tokens.extend(quote!(}));
}
}
// Using the implementation
fn use_custom_totokens() {
    let my_struct = Struct {
        name: Ident::new("MyStruct", proc_macro2::Span::call_site()),
        // The original split `vec` and `![...]` onto separate lines; keep
        // the macro invocation together.
        fields: vec![Field {
            name: Ident::new("id", proc_macro2::Span::call_site()),
            ty: syn::parse_quote!(u32),
            default: Some(syn::parse_quote!(0)),
        }],
    };
    // to_tokens only borrows `my_struct`, so it remains usable afterwards...
    let mut tokens = TokenStream::new();
    my_struct.to_tokens(&mut tokens);
    // ...until into_token_stream consumes it.
    let tokens = my_struct.into_token_stream();
}
Implement to_tokens for custom types to enable token generation.
Memory and Performance
// Fix: TokenStream is defined in proc_macro2; quote does not re-export it.
use proc_macro2::TokenStream;
use quote::ToTokens;
/// Contrasts the allocation behavior of the two conversion styles.
fn performance_comparison() {
    // to_tokens: appends to an existing TokenStream.
    // - No new TokenStream allocation per call
    // - Efficient for building incrementally
    let mut tokens = TokenStream::new();
    for _ in 0..100 {
        // Efficient: appends into the same stream.
        "item".to_tokens(&mut tokens);
    }
    // into_token_stream: creates a new TokenStream on every call.
    // - Fine for one-shot conversion
    // - Wasteful inside loops
    let mut tokens_vec = Vec::new();
    for _ in 0..100 {
        // Each call creates a fresh TokenStream.
        let t = "item".into_token_stream();
        tokens_vec.push(t);
    }
}
to_tokens is more efficient for incremental building.
Composition Pattern
use quote::{quote, ToTokens, TokenStream};
// to_tokens enables composition
fn compose_tokens() {
let mut tokens = TokenStream::new();
// Start with a struct
tokens.extend(quote!(struct Point {));
// Add fields conditionally
let has_x = true;
if has_x {
tokens.extend(quote!(x: f64, ));
}
// Add more fields
tokens.extend(quote!(y: f64, ));
tokens.extend(quote!(z: f64));
// Close struct
tokens.extend(quote!(}));
println!("{}", tokens);
}
// Pattern: Build tokens piece by piece
fn incremental_building() {
let mut tokens = TokenStream::new();
// Each call appends
tokens.extend(quote!(fn foo()));
tokens.extend(quote!( -> i32));
tokens.extend(quote!( { 42 }));
// vs into_token_stream would require combining
let fn_keyword = quote!(fn );
let name = quote!(foo);
let combined = quote! {
#fn_keyword #name() -> i32 { 42 }
};
}to_tokens enables building tokens piece by piece.
The quote! Macro and ToTokens
// Fix: TokenStream must be imported from proc_macro2.
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
use syn::Ident;
/// Shows that `#var` inside quote! is sugar for `var.to_tokens(...)`.
fn quote_macro_usage() {
    let ident = Ident::new("value", proc_macro2::Span::call_site());
    // #ident calls ident.to_tokens() under the hood.
    let tokens = quote! {
        let #ident = 42;
        println!("{}", #ident);
    };
    // quote! is implemented roughly as:
    fn manual_quote(ident: Ident) -> TokenStream {
        let mut tokens = TokenStream::new();
        tokens.extend(quote!(let ));
        ident.to_tokens(&mut tokens);
        tokens.extend(quote!( = 42; ));
        tokens
    }
}
The quote! macro uses to_tokens for interpolation.
Working with Optional Values
// Fix: TokenStream is a proc_macro2 type, not a quote export.
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
/// Option<T: ToTokens> implements ToTokens: Some forwards to the inner
/// value, None emits nothing.
fn optional_tokens() {
    let mut tokens = TokenStream::new();
    let some_value = Some(syn::Ident::new("x", proc_macro2::Span::call_site()));
    let none_value: Option<syn::Ident> = None;
    // Some(T) -> T.to_tokens(); None -> empty
    some_value.to_tokens(&mut tokens);
    println!("{}", tokens); // "x"
    let mut tokens2 = TokenStream::new();
    none_value.to_tokens(&mut tokens2);
    println!("{}", tokens2); // "" (empty)
    // Useful in the quote! macro:
    let name = Some(syn::Ident::new("var", proc_macro2::Span::call_site()));
    let tokens = quote! {
        let #name = 1; // Expands to: let var = 1;
    };
    let name: Option<syn::Ident> = None;
    let tokens = quote! {
        let #name = 1; // Expands to: let = 1; (nothing where #name was)
    };
}
Option<T> implements ToTokens for convenient optional interpolation.
Default Implementation
// Fixes: (1) TokenStream is imported from proc_macro2, not quote;
// (2) proc_macro2::TokenStream has no `From<Ident>` impl — the original
// `TokenStream::from(Ident::new(...))` did not compile. Conversions go
// through TokenTree, which TokenStream does convert from.
use proc_macro2::{Ident, Span, TokenStream, TokenTree};
use quote::ToTokens;
// into_token_stream has a default implementation:
// fn into_token_stream(self) -> TokenStream
// where
//     Self: Sized,
// {
//     let mut tokens = TokenStream::new();
//     self.to_tokens(&mut tokens);
//     tokens
// }
// This means:
// 1. You only need to implement to_tokens
// 2. into_token_stream is automatically provided
// 3. You can override into_token_stream if needed
/// Newtype around an identifier name, used to show the override pattern.
struct MyType(String);
impl ToTokens for MyType {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        // Wrap the Ident in a TokenTree and append it.
        tokens.extend(std::iter::once(TokenTree::Ident(Ident::new(
            &self.0,
            Span::call_site(),
        ))));
    }
    // into_token_stream would use the default if not provided, but can be
    // overridden to build the stream directly instead of appending into an
    // empty one.
    fn into_token_stream(self) -> TokenStream {
        TokenStream::from(TokenTree::Ident(Ident::new(&self.0, Span::call_site())))
    }
}
into_token_stream has a default implementation using to_tokens.
Interpolation in quote!
use quote::{quote, ToTokens, TokenStreamExt};
use syn::{Ident, Type};
fn interpolation() {
// quote! uses ToTokens for all interpolations
let ident = Ident::new("my_var", proc_macro2::Span::call_site());
let ty: Type = syn::parse_quote!(i32);
// #ident calls ident.to_tokens()
let tokens = quote! {
let #ident: #ty = 0;
};
// Repetition also uses to_tokens
let idents: Vec<Ident> = (0..3)
.map(|i| Ident::new(&format!("var_{}", i), proc_macro2::Span::call_site()))
.collect();
// #(#idents),* expands each ident using to_tokens
let tokens = quote! {
let (#(#idents),*) = (0, 1, 2);
};
// Manual expansion would be:
fn manual_expand(idents: &[Ident]) -> proc_macro2::TokenStream {
let mut tokens = proc_macro2::TokenStream::new();
tokens.extend(quote!(let ());
for (i, ident) in idents.iter().enumerate() {
if i > 0 {
tokens.extend(quote!(, ));
}
ident.to_tokens(&mut tokens);
}
tokens.extend(quote!( = (0, 1, 2);));
tokens
}
}The quote! macro relies on ToTokens for all interpolations.
Syn Types and ToTokens
// Fix: TokenStream must come from proc_macro2, not quote.
use proc_macro2::TokenStream;
use quote::ToTokens;
use syn::{Expr, Ident, Item, Path, Type};
/// Shows that common syn AST types all implement ToTokens.
fn syn_types() {
    let ident: Ident = syn::parse_quote!(my_ident);
    let path: Path = syn::parse_quote!(std::collections::HashMap);
    let ty: Type = syn::parse_quote!(Vec<String>);
    let expr: Expr = syn::parse_quote!(1 + 2);
    let item: Item = syn::parse_quote!(fn foo() {});
    // All can be appended to one stream (to_tokens only borrows)...
    let mut tokens = TokenStream::new();
    ident.to_tokens(&mut tokens);
    path.to_tokens(&mut tokens);
    ty.to_tokens(&mut tokens);
    expr.to_tokens(&mut tokens);
    item.to_tokens(&mut tokens);
    // ...and afterwards converted into owned streams, consuming each value.
    let ident_tokens: TokenStream = ident.into_token_stream();
    let path_tokens: TokenStream = path.into_token_stream();
}
Most syn types implement ToTokens.
Practical Procedural Macro Example
// Fix: TokenStream is a proc_macro2 type; quote does not export it.
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
use syn::DeriveInput;
/// Derive-style helper built incrementally with to_tokens.
fn derive_example(input: DeriveInput) -> TokenStream {
    let name = &input.ident;
    let generics = &input.generics;
    // Use to_tokens/extend for incremental building.
    let mut impl_tokens = TokenStream::new();
    // Start the impl header. (NOTE(review): production derives should use
    // `generics.split_for_impl()` so bounds and type params land correctly.)
    impl_tokens.extend(quote!(impl #generics MyTrait for #name));
    // Add the where clause if present.
    if let Some(where_clause) = &generics.where_clause {
        where_clause.to_tokens(&mut impl_tokens);
    }
    // Add the impl body.
    impl_tokens.extend(quote!( {
        fn my_method(&self) -> &str {
            stringify!(#name)
        }
    }));
    impl_tokens
}
/// Alternative one-shot style. quote! already yields an owned TokenStream,
/// so the original's trailing `.into_token_stream()` was a no-op and is
/// dropped here.
fn derive_into_ts(input: DeriveInput) -> TokenStream {
    let name = &input.ident;
    quote! {
        impl MyTrait for #name {
            fn my_method(&self) -> &str {
                stringify!(#name)
            }
        }
    }
}
Procedural macros use ToTokens extensively for code generation.
Comparison Summary
// Fix: TokenStream must be imported from proc_macro2, not quote.
use proc_macro2::TokenStream;
use quote::ToTokens;
/// Comparison table (comments only; no runtime behavior).
fn summary() {
    // | Aspect            | to_tokens                    | into_token_stream        |
    // |-------------------|------------------------------|--------------------------|
    // | Self              | &self (borrows)              | self (consumes)          |
    // | TokenStream       | &mut TokenStream (appends)   | Returns TokenStream      |
    // | Return            | () (modifies in place)       | TokenStream (new)        |
    // | Allocation        | None (uses existing)         | New TokenStream          |
    // | Use case          | Incremental building         | One-shot conversion      |
    // | Composition       | Excellent for chaining       | Less suitable            |
    // Both produce the same token output, just different API style
}
Both produce identical output; the difference is in usage patterns.
Synthesis
Quick reference:
// Fix: TokenStream lives in proc_macro2; quote does not re-export it.
use proc_macro2::TokenStream;
use quote::ToTokens;
// to_tokens: Append to existing TokenStream
let mut tokens = TokenStream::new();
value.to_tokens(&mut tokens); // Borrows value, appends to tokens
// value is still usable, tokens now contains value's tokens
// into_token_stream: Consume and return new TokenStream
let tokens = value.into_token_stream(); // Consumes value, returns TokenStream
// value is no longer usable
// Relationship (default implementation):
// fn into_token_stream(self) -> TokenStream {
//     let mut tokens = TokenStream::new();
//     self.to_tokens(&mut tokens);
//     tokens
// }
When to use each:
use quote::{quote, ToTokens, TokenStream};
// Use to_tokens when:
// - Building tokens incrementally
// - Appending to existing TokenStream
// - Implementing ToTokens for custom types
// - Looping and adding multiple items
// - Need to reuse the value after conversion
// Use into_token_stream when:
// - One-shot conversion
// - Function returns TokenStream
// - Value is no longer needed
// - Convenience is preferred over performanceKey insight: to_tokens and into_token_stream are two sides of the same coinâto_tokens is the fundamental operation that appends tokens to an existing TokenStream, while into_token_stream is a convenience wrapper that creates a new TokenStream and calls to_tokens. The default implementation of into_token_stream simply creates an empty TokenStream, calls to_tokens to populate it, and returns the result. This design reflects a common Rust pattern: to_tokens is the "low-level" operation that enables efficient composition and chaining (you can call it multiple times on the same TokenStream), while into_token_stream is the "high-level" convenience for cases where you just need the tokens. When implementing ToTokens for your own types, you only need to implement to_tokens; into_token_stream is automatically provided. The quote! macro relies entirely on to_tokens for interpolationâevery #variable in a quote! macro becomes a call to variable.to_tokens(), which is why any type implementing ToTokens can be interpolated.
