Skip to content
Open
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
349 changes: 267 additions & 82 deletions library/proc_macro/src/quote.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
//! This quasiquoter uses macros 2.0 hygiene to reliably access
//! items from `proc_macro`, to build a `proc_macro::TokenStream`.

use core::iter::Peekable;

use crate::{
BitOr, Delimiter, Group, Ident, Literal, Punct, Spacing, Span, ToTokens, TokenStream, TokenTree,
};
Expand Down Expand Up @@ -283,97 +285,35 @@ pub fn quote(stream: TokenStream) -> TokenStream {
let proc_macro_crate = minimal_quote!(crate);
let mut after_dollar = false;

let mut tokens = crate::TokenStream::new();
let mut iter = stream.into_iter().peekable();
let mut tokens = TokenStream::new();
let mut iter: Peekable<<TokenStream as IntoIterator>::IntoIter> = stream.into_iter().peekable();
while let Some(tree) = iter.next() {
if after_dollar {
after_dollar = false;
match tree {
TokenTree::Group(tt) => {
// Handles repetition by expanding `$( CONTENTS ) SEP_OPT *` to `{ REP_EXPANDED }`.
let contents = tt.stream();

// The `*` token is also consumed here.
let sep_opt: Option<Punct> = match (iter.next(), iter.peek()) {
(Some(TokenTree::Punct(sep)), Some(TokenTree::Punct(star)))
if sep.spacing() == Spacing::Joint && star.as_char() == '*' =>
{
iter.next();
Some(sep)
}
(Some(TokenTree::Punct(star)), _) if star.as_char() == '*' => None,
_ => panic!("`$(...)` must be followed by `*` in `quote!`"),
};

let mut rep_expanded = TokenStream::new();

// Append setup code for a `while`, where recursively quoted `CONTENTS`
// and `SEP_OPT` are repeatedly processed, to `REP_EXPANDED`.
let meta_vars = collect_meta_vars(contents.clone());
TokenTree::Group(ref tt) if tt.delimiter() == Delimiter::Parenthesis => {
consume_dollar_group_sep_star(tt.stream().clone(), &mut iter)
.to_tokens(&mut tokens);
continue;
}
TokenTree::Group(_) => {
minimal_quote!(
use crate::ext::*;
(@ if sep_opt.is_some() {
minimal_quote!(let mut _i = 0usize;)
} else {
minimal_quote!(();)
})
let has_iter = crate::ThereIsNoIteratorInRepetition;
crate::ToTokens::to_tokens(&TokenTree::from(Punct::new('$', Spacing::Joint)), &mut ts);
)
.to_tokens(&mut rep_expanded);
for meta_var in &meta_vars {
minimal_quote!(
#[allow(unused_mut)]
let (mut (@ meta_var), i) = (@ meta_var).quote_into_iter();
let has_iter = has_iter | i;
)
.to_tokens(&mut rep_expanded);
}
minimal_quote!(let _: crate::HasIterator = has_iter;)
.to_tokens(&mut rep_expanded);

// Append the `while` to `REP_EXPANDED`.
let mut while_body = TokenStream::new();
for meta_var in &meta_vars {
minimal_quote!(
let (@ meta_var) = match (@ meta_var).next() {
Some(_x) => crate::RepInterp(_x),
None => break,
};
)
.to_tokens(&mut while_body);
}
minimal_quote!(
(@ if let Some(sep) = sep_opt {
minimal_quote!(
if _i > 0 {
(@ minimal_quote!(crate::ToTokens::to_tokens(&crate::TokenTree::Punct(crate::Punct::new(
(@ TokenTree::from(Literal::character(sep.as_char()))),
(@ minimal_quote!(crate::Spacing::Alone)),
)), &mut ts);))
}
_i += 1;
)
} else {
minimal_quote!(();)
})
(@ quote(contents.clone())).to_tokens(&mut ts);
.to_tokens(&mut tokens);
minimal_quote!((@
quote(TokenStream::from(tree))
).to_tokens(&mut ts);
)
.to_tokens(&mut while_body);
rep_expanded.extend(vec![
TokenTree::Ident(Ident::new("while", Span::call_site())),
TokenTree::Ident(Ident::new("true", Span::call_site())),
TokenTree::Group(Group::new(Delimiter::Brace, while_body)),
]);

minimal_quote!((@ TokenTree::Group(Group::new(Delimiter::Brace, rep_expanded)))).to_tokens(&mut tokens);
.to_tokens(&mut tokens);
continue;
}
TokenTree::Ident(_) => {
minimal_quote!(crate::ToTokens::to_tokens(&(@ tree), &mut ts);)
.to_tokens(&mut tokens);
continue;
}
TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
TokenTree::Punct(ref tt) if tt.as_char() == '$' => {} // Escape `$` via `$$`.
_ => panic!(
"`$` must be followed by an ident or `$` or a repetition group in `quote!`"
),
Expand Down Expand Up @@ -450,20 +390,34 @@ pub fn quote(stream: TokenStream) -> TokenStream {
}
}

/// Helper function to support macro repetitions like `$( CONTENTS ) SEP_OPT *` in `quote!`.
/// Helper function to support macro repetitions like `$( CONTENTS ) SEP *` in `quote!`.
/// Recursively collects all `Ident`s (meta-variables) that follow a `$`
/// from the given `CONTENTS` stream, preserving their order of appearance.
fn collect_meta_vars(content_stream: TokenStream) -> Vec<Ident> {
fn helper(stream: TokenStream, out: &mut Vec<Ident>) {
let mut after_dollar = false;

let mut iter = stream.into_iter().peekable();
while let Some(tree) = iter.next() {
match &tree {
TokenTree::Punct(tt) if tt.as_char() == '$' => {
if let Some(TokenTree::Ident(id)) = iter.peek() {
while let Some(ref tree) = iter.next() {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is going to miss adjacent metavariables. For example this works:

let a = ['a'];
let b = ['b'];
eprintln!("{}", proc_macro::quote!($($a . $b)*));

but this does not:

eprintln!("{}", proc_macro::quote!($($a $b)*));

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

if after_dollar {
after_dollar = false;
match tree {
TokenTree::Ident(id) => {
out.push(id.clone());
iter.next();
continue;
}
// TokenTree::Punct(tt) if tt.as_char() == '$' => {} // Escape `$` via `$$`.
_ => {}
}
} else if let TokenTree::Punct(tt) = tree
&& tt.as_char() == '$'
{
after_dollar = true;
continue;
}

match tree {
TokenTree::Group(tt) => {
helper(tt.stream(), out);
}
Expand All @@ -477,6 +431,237 @@ fn collect_meta_vars(content_stream: TokenStream) -> Vec<Ident> {
vars
}

/// Consume a `$( CONTENTS ) SEP *` accordingly. It handles repetition by
/// expanding `$( CONTENTS ) SEP *` to `{ REP_EXPANDED }`.
///
/// `content_stream` is the stream inside the already-consumed `$( ... )` group;
/// `iter` is positioned just after that group. Tokens are pulled from `iter`
/// while searching for the terminating `*`:
///
/// - If a `*` is found (and is not the start of `*=`), the accumulated
///   separator candidate and the contents are expanded via
///   `expand_dollar_group_sep_star`.
/// - If another `$( ... )` group begins before a `*` is seen, the current
///   `$( CONTENTS ) SEP` is emitted literally and scanning restarts on the
///   new group.
/// - If the stream ends or an unusable token appears, the current
///   `$( CONTENTS ) SEP` is emitted literally and the unconsumed token is
///   pushed back onto `iter`.
fn consume_dollar_group_sep_star(
    content_stream: TokenStream,
    iter: &mut Peekable<<TokenStream as IntoIterator>::IntoIter>,
) -> TokenStream {
    let mut tokens = TokenStream::new();

    let mut current_contents: TokenStream = content_stream;
    // Tokens tentatively collected as the repetition separator.
    let mut sep_cand = Vec::new();
    // True once `sep_cand` forms a complete, valid separator token.
    let mut is_sep_confirmed = false;
    loop {
        match (iter.next(), iter.peek().cloned()) {
            // If a valid `*` is found, expand the current_contents and consume the `*`.
            (Some(TokenTree::Punct(star)), opt)
                if star.as_char() == '*' && !matches!(opt, Some(TokenTree::Punct(_))) =>
            {
                expand_dollar_group_sep_star(current_contents, TokenStream::from_iter(sep_cand))
                    .to_tokens(&mut tokens);
                break;
            }
            // A `*` followed by another punct is still the terminator unless the
            // pair spells `*=` (joint spacing), which must not be split.
            (Some(TokenTree::Punct(star)), Some(TokenTree::Punct(not_assign)))
                if star.as_char() == '*'
                    && !(star.spacing() == Spacing::Joint && not_assign.as_char() == '=') =>
            {
                expand_dollar_group_sep_star(current_contents, TokenStream::from_iter(sep_cand))
                    .to_tokens(&mut tokens);
                break;
            }

            // If the next `$( CONTENTS )` is found before the `*`, consume the current `$( CONTENTS ) SEP` literally.
            // Then move to consume the next `$( CONTENTS ) SEP *`.
            (Some(TokenTree::Punct(dollar)), Some(TokenTree::Group(next_group)))
                if dollar.as_char() == '$' && next_group.delimiter() == Delimiter::Parenthesis =>
            {
                minimal_quote!(
                    crate::ToTokens::to_tokens(&TokenTree::from(Punct::new('$', Spacing::Joint)), &mut ts);
                )
                .to_tokens(&mut tokens);
                minimal_quote!((@
                    quote(
                        TokenStream::from(TokenTree::Group(Group::new(Delimiter::Parenthesis, current_contents))))
                    ).to_tokens(&mut ts);
                )
                .to_tokens(&mut tokens);
                let sep = TokenStream::from_iter(sep_cand);
                if !sep.is_empty() {
                    minimal_quote!((@
                        quote(sep)).to_tokens(&mut ts);
                    )
                    .to_tokens(&mut tokens);
                }

                // Move to consume the next `$( CONTENTS ) SEP *`.
                current_contents = next_group.stream();
                iter.next();
                sep_cand = Vec::new();
                is_sep_confirmed = false;
            }

            // Add the current token to the separator candidate until the separator is confirmed.
            (Some(x), _) if !is_sep_confirmed => {
                sep_cand.push(x.clone());
                is_sep_confirmed = is_valid_sep(sep_cand.as_slice());

                // Greedily extend a joint-spaced punct run (e.g. `..` -> `..=`)
                // while the extension still forms a valid separator.
                let mut current = Some(x);
                while is_sep_confirmed {
                    if let Some(TokenTree::Punct(ref punct)) = current
                        && punct.spacing() == Spacing::Joint
                    {
                        match iter.peek() {
                            Some(next) => {
                                sep_cand.push(next.clone());
                                is_sep_confirmed = is_valid_sep(sep_cand.as_slice());

                                if !is_sep_confirmed {
                                    // The extension broke validity: drop it and
                                    // re-terminate the previous punct with Alone
                                    // spacing so the separator stands on its own.
                                    sep_cand.pop();
                                    if let Some(TokenTree::Punct(ref p)) = sep_cand.pop() {
                                        let mut new_p = Punct::new(p.as_char(), Spacing::Alone);
                                        new_p.set_span(p.span());
                                        sep_cand.push(TokenTree::Punct(new_p));
                                    }

                                    is_sep_confirmed = true;
                                    break;
                                }
                                current = iter.next();
                            }
                            None => break,
                        }
                    } else {
                        break;
                    }
                }
            }

            // Consume the current `$( CONTENTS ) SEP` literally without a `*`.
            (x, _) => {
                minimal_quote!(
                    crate::ToTokens::to_tokens(&TokenTree::from(Punct::new('$', Spacing::Joint)), &mut ts);
                )
                .to_tokens(&mut tokens);
                minimal_quote!((@
                    quote(
                        TokenStream::from(TokenTree::Group(Group::new(Delimiter::Parenthesis, current_contents))))
                    ).to_tokens(&mut ts);
                )
                .to_tokens(&mut tokens);
                let sep = TokenStream::from_iter(sep_cand);
                if !sep.is_empty() {
                    minimal_quote!((@
                        quote(sep)).to_tokens(&mut ts);
                    )
                    .to_tokens(&mut tokens);
                }

                // Recover the unconsumed token `x`.
                let mut new_stream = x.into_iter().collect::<TokenStream>();
                new_stream.extend(iter.by_ref());
                *iter = new_stream.into_iter().peekable();
                break;
            }
        }
    }
    tokens
}

/// Determine if the given token sequence is a valid separator.
fn is_valid_sep(ts: &[TokenTree]) -> bool {
match ts {
[TokenTree::Punct(t1), TokenTree::Punct(t2), TokenTree::Punct(t3)]
if t1.spacing() == Spacing::Joint && t2.spacing() == Spacing::Joint =>
{
matches!(
[t1.as_char(), t2.as_char(), t3.as_char()],
['.', '.', '.'] | ['.', '.', '='] | ['<', '<', '='] | ['>', '>', '=']
)
}
[TokenTree::Punct(t1), TokenTree::Punct(t2)]
if t1.spacing() == Spacing::Joint =>
{
matches!(
[t1.as_char(), t2.as_char()],
[':', ':'] | ['+', '='] | ['&', '&'] | ['&', '='] | ['^', '='] | ['/', '=']
| ['.', '.'] | ['=', '='] | ['>', '='] | ['<', '='] | ['*', '='] | ['!', '=']
| ['|', '='] | ['|', '|'] | ['-', '>'] | ['<', '-'] | ['%', '='] | ['=', '>']
| ['<', '<'] | ['>', '>'] | ['-', '=']
)
}
[TokenTree::Punct(single_quote), TokenTree::Ident(_)] // lifetime
if single_quote.as_char() == '\'' && single_quote.spacing() == Spacing::Joint =>
{
true
}
[TokenTree::Punct(t1)] =>
{
matches!(
t1.as_char(),
// The LEGAL_CHARS except '\'' are available here.
'#' | ',' | '.' | ';' | ':' | '+' | '@' | '!' | '^' | '&' | '/' |
'=' | '>' | '<' | '|' | '?' | '%' | '*' | '-' | '_' | '~'
)
}
[TokenTree::Ident(_)] | [TokenTree::Group(_)] | [TokenTree::Literal(_)] => true,
_ => false,
}
}

/// Expand a repetition `$( CONTENTS ) SEP *` into a brace-delimited block
/// `{ REP_EXPANDED }` of generated quoting code.
///
/// `contents` is the stream inside the `$( ... )` group; `sep` is the
/// (possibly empty) separator token stream between `)` and `*`. The emitted
/// block binds each meta-variable to an iterator via `quote_into_iter`, then
/// drives them in lock-step inside a `while true { ... }` loop whose bodies
/// `break` when any iterator is exhausted.
fn expand_dollar_group_sep_star(contents: TokenStream, sep: TokenStream) -> TokenStream {
    let mut tokens = TokenStream::new();

    let mut rep_expanded = TokenStream::new();

    // Append setup code for a `while`, where recursively quoted `CONTENTS`
    // and `SEP` are repeatedly processed, to `REP_EXPANDED`.
    let meta_vars = collect_meta_vars(contents.clone());
    // `_i` counts iterations so the separator is only emitted between items;
    // it is only declared when a separator actually exists.
    minimal_quote!(
        use crate::ext::*;
        (@ if !sep.is_empty() {
            Some(minimal_quote!(let mut _i = 0usize;))
        } else {
            None
        })
        let has_iter = crate::ThereIsNoIteratorInRepetition;
    )
    .to_tokens(&mut rep_expanded);
    // Bind each meta-variable to an iterator; OR-ing the `i` markers lets the
    // type of `has_iter` prove at compile time that at least one variable iterates.
    for meta_var in &meta_vars {
        minimal_quote!(
            #[allow(unused_mut)]
            let (mut (@ meta_var), i) = (@ meta_var).quote_into_iter();
            let has_iter = has_iter | i;
        )
        .to_tokens(&mut rep_expanded);
    }
    minimal_quote!(let _: crate::HasIterator = has_iter;).to_tokens(&mut rep_expanded);

    // Append the `while` to `REP_EXPANDED`.
    let mut while_body = TokenStream::new();
    // Advance every meta-variable each pass; any exhausted iterator ends the loop.
    for meta_var in &meta_vars {
        minimal_quote!(
            let (@ meta_var) = match (@ meta_var).next() {
                Some(_x) => crate::RepInterp(_x),
                None => break,
            };
        )
        .to_tokens(&mut while_body);
    }
    // Emit the separator before every item except the first, then the
    // recursively quoted contents.
    minimal_quote!(
        (@ if !sep.is_empty() {
            Some(minimal_quote!(
                if _i > 0 {
                    (@ quote(sep)).to_tokens(&mut ts);
                }
                _i += 1;
            ))
        } else {
            None
        })
        (@ quote(contents)).to_tokens(&mut ts);
    )
    .to_tokens(&mut while_body);
    // `while true { ... }` (rather than `loop`) is built token-by-token here.
    rep_expanded.extend(vec![
        TokenTree::Ident(Ident::new("while", Span::call_site())),
        TokenTree::Ident(Ident::new("true", Span::call_site())),
        TokenTree::Group(Group::new(Delimiter::Brace, while_body)),
    ]);

    // Wrap the whole expansion in braces so its `let` bindings stay scoped.
    minimal_quote!((@ TokenTree::Group(Group::new(Delimiter::Brace, rep_expanded))))
        .to_tokens(&mut tokens);
    tokens
}

/// Quote a `Span` into a `TokenStream`.
/// This is needed to implement a custom quoter.
#[unstable(feature = "proc_macro_quote", issue = "54722")]
Expand Down
Loading
Loading