@@ -568,10 +568,9 @@ fn is_valid_sep(ts: &[TokenTree]) -> bool {
 
 fn expand_dollar_group_sep_star(tokens: &mut TokenStream, contents: TokenStream, sep: TokenStream) {
     let mut rep_expanded = TokenStream::new();
-
-    // Append a setup code for a `while`, where recursively quoted `CONTENTS`
-    // and `SEP` are repeatedly processed, to `REP_EXPANDED`.
     let meta_vars = collect_meta_vars(contents.clone());
+
+    // Construct setup code for the `while_loop`, where recursively quoted `CONTENTS` and `SEP` are repeatedly processed.
     minimal_quote!(
         use crate::ext::*;
         (@ if !sep.is_empty() {
@@ -592,8 +591,8 @@ fn expand_dollar_group_sep_star(tokens: &mut TokenStream, contents: TokenStream,
     }
     minimal_quote!(let _: crate::HasIterator = has_iter;).to_tokens(&mut rep_expanded);
 
-    // Append the `while` to `REP_EXPANDED`.
+    // Construct the `while_loop`.
     let mut while_body = TokenStream::new();
     for meta_var in &meta_vars {
         minimal_quote!(
             let (@ meta_var) = match (@ meta_var).next() {
@@ -619,12 +618,13 @@ fn expand_dollar_group_sep_star(tokens: &mut TokenStream, contents: TokenStream,
         (@ quote(contents)).to_tokens(&mut ts);
     )
     .to_tokens(&mut while_body);
-    rep_expanded.extend(vec![
-        TokenTree::Ident(Ident::new("while", Span::call_site())),
-        TokenTree::Ident(Ident::new("true", Span::call_site())),
-        TokenTree::Group(Group::new(Delimiter::Brace, while_body)),
-    ]);
 
+    let while_loop = vec![
+        TokenTree::Ident(Ident::new("while", Span::call_site())),
+        TokenTree::Ident(Ident::new("true", Span::call_site())),
+        TokenTree::Group(Group::new(Delimiter::Brace, while_body)),
+    ];
+    rep_expanded.extend(while_loop);
     minimal_quote!((@ TokenTree::Group(Group::new(Delimiter::Brace, rep_expanded))))
         .to_tokens(tokens);
 }
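
For context, the `while_loop` vector built here is nothing more than the three raw token trees of a `while true { ... }` statement, with the finished `while_body` wrapped in a brace-delimited group. Below is a minimal standalone sketch of the same token-building technique; it assumes the `proc_macro2` crate as a stand-in (real `proc_macro` types are only usable inside an actual macro expansion), and `wrap_in_while_true` is a hypothetical helper, not part of the code above:

// Minimal sketch: build `while true { <body> }` by pushing raw token trees,
// mirroring how `expand_dollar_group_sep_star` assembles `while_loop`.
use proc_macro2::{Delimiter, Group, Ident, Span, TokenStream, TokenTree};

fn wrap_in_while_true(while_body: TokenStream) -> TokenStream {
    let mut out = TokenStream::new();
    out.extend([
        TokenTree::Ident(Ident::new("while", Span::call_site())),
        TokenTree::Ident(Ident::new("true", Span::call_site())),
        // The brace group takes ownership of the body stream, so the body
        // must be fully built before this group is constructed.
        TokenTree::Group(Group::new(Delimiter::Brace, while_body)),
    ]);
    out
}

fn main() {
    let body: TokenStream = "x += 1;".parse().unwrap();
    // Prints something like: while true { x += 1 ; }
    println!("{}", wrap_in_while_true(body));
}

In the generated repetition code, the loop presumably terminates via a `break` when one of the meta-variable iterators returns `None` (the `match (@ meta_var).next()` shown in the hunk above).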