Rollup merge of rust-lang#131441 - SpriteOvO:proc-macro-to-tokens-trait, r=dtolnay

Add a new trait `proc_macro::ToTokens`

Tracking issue: rust-lang#130977

This PR adds a new trait `ToTokens`, implemented for types that can be interpolated inside a `quote!` invocation.

```rust
impl ToTokens for TokenTree
impl ToTokens for TokenStream
impl ToTokens for Literal
impl ToTokens for Ident
impl ToTokens for Punct
impl ToTokens for Group
impl<T: ToTokens + ?Sized> ToTokens for &T
impl<T: ToTokens + ?Sized> ToTokens for &mut T
impl<T: ToTokens + ?Sized> ToTokens for Box<T>
impl<T: ToTokens + ?Sized> ToTokens for Rc<T>
impl<T: ToTokens + ToOwned + ?Sized> ToTokens for Cow<'_, T>
impl<T: ToTokens> ToTokens for Option<T>
impl ToTokens for u{8,16,32,64,128}
impl ToTokens for i{8,16,32,64,128}
impl ToTokens for f{32,64}
impl ToTokens for {u,i}size
impl ToTokens for bool
impl ToTokens for char
impl ToTokens for str
impl ToTokens for String
impl ToTokens for CStr
impl ToTokens for CString
```

~This PR also implements the migration mentioned in the tracking issue, replacing `Extend<Token{Tree,Stream}>` with `Extend<T: ToTokens>`, and replacing `FromIterator<Token{Tree,Stream}>` with `FromIterator<T: ToTokens>`.~ **UPDATE**: Reverted.

```diff
-impl FromIterator<TokenTree> for TokenStream
-impl FromIterator<TokenStream> for TokenStream
+impl<T: ToTokens> FromIterator<T> for TokenStream

-impl Extend<TokenTree> for TokenStream
-impl Extend<TokenStream> for TokenStream
+impl<T: ToTokens> Extend<T> for TokenStream
```

I'm going to leave some comments in the review where I'm unsure and concerned.

r? ``@dtolnay`` CC ``@tgross35``
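For orientation, here is a minimal usage sketch of what the trait enables in a proc-macro crate. It is not part of this commit; it assumes a nightly toolchain with the unstable `proc_macro_totokens` feature, and the macro name `answer` is purely hypothetical:

```rust
#![feature(proc_macro_totokens)]
extern crate proc_macro;

use proc_macro::{ToTokens, TokenStream};

/// Hypothetical function-like macro that expands to the suffixed literal `42u32`.
#[proc_macro]
pub fn answer(_input: TokenStream) -> TokenStream {
    let value: u32 = 42;
    // `to_token_stream` is the provided convenience method built on `to_tokens`;
    // the `u32` impl emits a suffixed integer literal.
    value.to_token_stream()
}
```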
Showing 2 changed files with 314 additions and 0 deletions.
@@ -0,0 +1,310 @@
use std::borrow::Cow;
use std::ffi::{CStr, CString};
use std::rc::Rc;

use crate::{ConcatTreesHelper, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};

/// Types that can be interpolated inside a [`quote!`] invocation.
///
/// [`quote!`]: crate::quote!
#[unstable(feature = "proc_macro_totokens", issue = "130977")]
pub trait ToTokens {
    /// Write `self` to the given `TokenStream`.
    ///
    /// # Example
    ///
    /// Example implementation for a struct representing Rust paths like
    /// `std::cmp::PartialEq`:
    ///
    /// ```
    /// #![feature(proc_macro_totokens)]
    ///
    /// use std::iter;
    /// use proc_macro::{Spacing, Punct, TokenStream, TokenTree, ToTokens};
    ///
    /// pub struct Path {
    ///     pub global: bool,
    ///     pub segments: Vec<PathSegment>,
    /// }
    ///
    /// impl ToTokens for Path {
    ///     fn to_tokens(&self, tokens: &mut TokenStream) {
    ///         for (i, segment) in self.segments.iter().enumerate() {
    ///             if i > 0 || self.global {
    ///                 // Double colon `::`
    ///                 tokens.extend(iter::once(TokenTree::from(Punct::new(':', Spacing::Joint))));
    ///                 tokens.extend(iter::once(TokenTree::from(Punct::new(':', Spacing::Alone))));
    ///             }
    ///             segment.to_tokens(tokens);
    ///         }
    ///     }
    /// }
    /// #
    /// # pub struct PathSegment;
    /// #
    /// # impl ToTokens for PathSegment {
    /// #     fn to_tokens(&self, tokens: &mut TokenStream) {
    /// #         unimplemented!()
    /// #     }
    /// # }
    /// ```
    fn to_tokens(&self, tokens: &mut TokenStream);

    /// Convert `self` directly into a `TokenStream` object.
    ///
    /// This method is implicitly implemented using `to_tokens`, and acts as a
    /// convenience method for consumers of the `ToTokens` trait.
    fn to_token_stream(&self) -> TokenStream {
        let mut tokens = TokenStream::new();
        self.to_tokens(&mut tokens);
        tokens
    }

    /// Convert `self` directly into a `TokenStream` object.
    ///
    /// This method is implicitly implemented using `to_tokens`, and acts as a
    /// convenience method for consumers of the `ToTokens` trait.
    fn into_token_stream(self) -> TokenStream
    where
        Self: Sized,
    {
        self.to_token_stream()
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for TokenTree {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(self.clone());
    }

    fn into_token_stream(self) -> TokenStream {
        let mut builder = ConcatTreesHelper::new(1);
        builder.push(self);
        builder.build()
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for TokenStream {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend(self.clone());
    }

    fn into_token_stream(self) -> TokenStream {
        self
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for Literal {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(TokenTree::from(self.clone()));
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for Ident {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(TokenTree::from(self.clone()));
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for Punct {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(TokenTree::from(self.clone()));
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for Group {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        tokens.extend_one(TokenTree::from(self.clone()));
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ?Sized> ToTokens for &T {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ?Sized> ToTokens for &mut T {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ?Sized> ToTokens for Box<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ?Sized> ToTokens for Rc<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens + ToOwned + ?Sized> ToTokens for Cow<'_, T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        (**self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl<T: ToTokens> ToTokens for Option<T> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        if let Some(t) = self {
            t.to_tokens(tokens);
        }
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u8 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u8_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u16 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u16_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u32 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u32_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u64 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u64_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for u128 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::u128_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i8 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i8_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i16 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i16_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i32 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i32_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i64 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i64_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for i128 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::i128_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for f32 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::f32_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for f64 {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::f64_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for usize {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::usize_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for isize {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::isize_suffixed(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for bool {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let word = if *self { "true" } else { "false" };
        Ident::new(word, Span::call_site()).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for char {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::character(*self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for str {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::string(self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for String {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::string(self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for CStr {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::c_string(self).to_tokens(tokens)
    }
}

#[unstable(feature = "proc_macro_totokens", issue = "130977")]
impl ToTokens for CString {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        Literal::c_string(self).to_tokens(tokens)
    }
}
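As a complementary sketch (the `Limit` type and its fields are hypothetical, and this again assumes `#![feature(proc_macro_totokens)]` on a nightly toolchain), a custom `ToTokens` impl can delegate to the primitive impls above instead of assembling `TokenTree`s by hand as the `Path` doc example does:

```rust
#![feature(proc_macro_totokens)]
extern crate proc_macro;

use proc_macro::{ToTokens, TokenStream};

// Hypothetical helper type used while generating code inside a proc-macro crate.
struct Limit {
    name: String,
    value: u64,
}

impl ToTokens for Limit {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        // `String` interpolates as a string literal and `u64` as a suffixed
        // integer literal (e.g. `123u64`), so no manual `Literal` construction
        // is needed here.
        self.name.to_tokens(tokens);
        self.value.to_tokens(tokens);
    }
}
```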