diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs
index 8ee3d4d590cd3..ec207118c0989 100644
--- a/compiler/rustc_ast/src/attr/mod.rs
+++ b/compiler/rustc_ast/src/attr/mod.rs
@@ -1,7 +1,6 @@
 //! Functions dealing with attributes and meta items.

 use std::fmt::Debug;
-use std::iter;
 use std::sync::atomic::{AtomicU32, Ordering};

 use rustc_index::bit_set::GrowableBitSet;
@@ -17,7 +16,9 @@ use crate::ast::{
 };
 use crate::ptr::P;
 use crate::token::{self, CommentKind, Delimiter, Token};
-use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenTree};
+use crate::tokenstream::{
+    DelimSpan, LazyAttrTokenStream, Spacing, TokenStream, TokenStreamIter, TokenTree,
+};
 use crate::util::comments;
 use crate::util::literal::escape_string_symbol;
@@ -366,12 +367,9 @@ impl MetaItem {
         }
     }

-    fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
-    where
-        I: Iterator<Item = &'a TokenTree>,
-    {
+    fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItem> {
         // FIXME: Share code with `parse_path`.
-        let tt = tokens.next().map(|tt| TokenTree::uninterpolate(tt));
+        let tt = iter.next().map(|tt| TokenTree::uninterpolate(tt));
         let path = match tt.as_deref() {
             Some(&TokenTree::Token(
                 Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span },
                 _,
             )) => 'arm: {
                 let mut segments = if let &token::Ident(name, _) = kind {
                     if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
-                        tokens.peek()
+                        iter.peek()
                     {
-                        tokens.next();
+                        iter.next();
                         thin_vec![PathSegment::from_ident(Ident::new(name, span))]
                     } else {
                         break 'arm Path::from_ident(Ident::new(name, span));
@@ -391,16 +389,16 @@ impl MetaItem {
                 };
                 loop {
                     if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) =
-                        tokens.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
+                        iter.next().map(|tt| TokenTree::uninterpolate(tt)).as_deref()
                     {
                         segments.push(PathSegment::from_ident(Ident::new(name, span)));
                     } else {
                         return None;
                     }
                     if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
-                        tokens.peek()
+                        iter.peek()
                     {
-                        tokens.next();
+                        iter.next();
                     } else {
                         break;
                     }
@@ -421,8 +419,8 @@ impl MetaItem {
             }
             _ => return None,
         };
-        let list_closing_paren_pos = tokens.peek().map(|tt| tt.span().hi());
-        let kind = MetaItemKind::from_tokens(tokens)?;
+        let list_closing_paren_pos = iter.peek().map(|tt| tt.span().hi());
+        let kind = MetaItemKind::from_tokens(iter)?;
         let hi = match &kind {
             MetaItemKind::NameValue(lit) => lit.span.hi(),
             MetaItemKind::List(..) => list_closing_paren_pos.unwrap_or(path.span.hi()),
@@ -439,12 +437,12 @@ impl MetaItem {
 impl MetaItemKind {
     // public because it can be called in the hir
     pub fn list_from_tokens(tokens: TokenStream) -> Option<ThinVec<MetaItemInner>> {
-        let mut tokens = tokens.trees().peekable();
+        let mut iter = tokens.iter();
         let mut result = ThinVec::new();
-        while tokens.peek().is_some() {
-            let item = MetaItemInner::from_tokens(&mut tokens)?;
+        while iter.peek().is_some() {
+            let item = MetaItemInner::from_tokens(&mut iter)?;
             result.push(item);
-            match tokens.next() {
+            match iter.next() {
                 None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => {}
                 _ => return None,
             }
@@ -452,12 +450,10 @@ impl MetaItemKind {
         Some(result)
     }

-    fn name_value_from_tokens<'a>(
-        tokens: &mut impl Iterator<Item = &'a TokenTree>,
-    ) -> Option<MetaItemKind> {
-        match tokens.next() {
+    fn name_value_from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
+        match iter.next() {
             Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
-                MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
+                MetaItemKind::name_value_from_tokens(&mut inner_tokens.iter())
             }
             Some(TokenTree::Token(token, _)) => {
                 MetaItemLit::from_token(token).map(MetaItemKind::NameValue)
@@ -466,19 +462,17 @@ impl MetaItemKind {
         }
     }

-    fn from_tokens<'a>(
-        tokens: &mut iter::Peekable<impl Iterator<Item = &'a TokenTree>>,
-    ) -> Option<MetaItemKind> {
-        match tokens.peek() {
+    fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemKind> {
+        match iter.peek() {
             Some(TokenTree::Delimited(.., Delimiter::Parenthesis, inner_tokens)) => {
                 let inner_tokens = inner_tokens.clone();
-                tokens.next();
+                iter.next();
                 MetaItemKind::list_from_tokens(inner_tokens).map(MetaItemKind::List)
             }
             Some(TokenTree::Delimited(..)) => None,
             Some(TokenTree::Token(Token { kind: token::Eq, .. }, _)) => {
-                tokens.next();
-                MetaItemKind::name_value_from_tokens(tokens)
+                iter.next();
+                MetaItemKind::name_value_from_tokens(iter)
             }
             _ => Some(MetaItemKind::Word),
         }
@@ -594,22 +588,19 @@ impl MetaItemInner {
         self.meta_item().is_some()
     }

-    fn from_tokens<'a, I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemInner>
-    where
-        I: Iterator<Item = &'a TokenTree>,
-    {
-        match tokens.peek() {
+    fn from_tokens(iter: &mut TokenStreamIter<'_>) -> Option<MetaItemInner> {
+        match iter.peek() {
             Some(TokenTree::Token(token, _)) if let Some(lit) = MetaItemLit::from_token(token) => {
-                tokens.next();
+                iter.next();
                 return Some(MetaItemInner::Lit(lit));
             }
             Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
-                tokens.next();
-                return MetaItemInner::from_tokens(&mut inner_tokens.trees().peekable());
+                iter.next();
+                return MetaItemInner::from_tokens(&mut inner_tokens.iter());
             }
             _ => {}
         }
-        MetaItem::from_tokens(tokens).map(MetaItemInner::MetaItem)
+        MetaItem::from_tokens(iter).map(MetaItemInner::MetaItem)
     }
 }
diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs
index c6b6addc946e8..e7b393d869d2b 100644
--- a/compiler/rustc_ast/src/tokenstream.rs
+++ b/compiler/rustc_ast/src/tokenstream.rs
@@ -99,7 +99,7 @@ where
     CTX: crate::HashStableContext,
 {
     fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
-        for sub_tt in self.trees() {
+        for sub_tt in self.iter() {
             sub_tt.hash_stable(hcx, hasher);
         }
     }
@@ -406,7 +406,7 @@ impl Eq for TokenStream {}

 impl PartialEq<TokenStream> for TokenStream {
     fn eq(&self, other: &TokenStream) -> bool {
-        self.trees().eq(other.trees())
+        self.iter().eq(other.iter())
     }
 }
@@ -423,24 +423,24 @@ impl TokenStream {
         self.0.len()
     }

-    pub fn trees(&self) -> RefTokenTreeCursor<'_> {
-        RefTokenTreeCursor::new(self)
+    pub fn get(&self, index: usize) -> Option<&TokenTree> {
+        self.0.get(index)
     }

-    pub fn into_trees(self) -> TokenTreeCursor {
-        TokenTreeCursor::new(self)
+    pub fn iter(&self) -> TokenStreamIter<'_> {
+        TokenStreamIter::new(self)
     }

     /// Compares two `TokenStream`s, checking equality without regarding span information.
pub fn eq_unspanned(&self, other: &TokenStream) -> bool { - let mut t1 = self.trees(); - let mut t2 = other.trees(); - for (t1, t2) in iter::zip(&mut t1, &mut t2) { - if !t1.eq_unspanned(t2) { + let mut iter1 = self.iter(); + let mut iter2 = other.iter(); + for (tt1, tt2) in iter::zip(&mut iter1, &mut iter2) { + if !tt1.eq_unspanned(tt2) { return false; } } - t1.next().is_none() && t2.next().is_none() + iter1.next().is_none() && iter2.next().is_none() } /// Create a token stream containing a single token with alone spacing. The @@ -509,7 +509,7 @@ impl TokenStream { #[must_use] pub fn flattened(&self) -> TokenStream { fn can_skip(stream: &TokenStream) -> bool { - stream.trees().all(|tree| match tree { + stream.iter().all(|tree| match tree { TokenTree::Token(token, _) => !matches!( token.kind, token::NtIdent(..) | token::NtLifetime(..) | token::Interpolated(..) @@ -522,7 +522,7 @@ impl TokenStream { return self.clone(); } - self.trees().map(|tree| TokenStream::flatten_token_tree(tree)).collect() + self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect() } // If `vec` is not empty, try to glue `tt` onto its last token. The return @@ -665,25 +665,26 @@ impl TokenStream { } } -/// By-reference iterator over a [`TokenStream`], that produces `&TokenTree` -/// items. #[derive(Clone)] -pub struct RefTokenTreeCursor<'t> { +pub struct TokenStreamIter<'t> { stream: &'t TokenStream, index: usize, } -impl<'t> RefTokenTreeCursor<'t> { +impl<'t> TokenStreamIter<'t> { fn new(stream: &'t TokenStream) -> Self { - RefTokenTreeCursor { stream, index: 0 } + TokenStreamIter { stream, index: 0 } } - pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> { - self.stream.0.get(self.index + n) + // Peeking could be done via `Peekable`, but most iterators need peeking, + // and this is simple and avoids the need to use `peekable` and `Peekable` + // at all the use sites. + pub fn peek(&self) -> Option<&'t TokenTree> { + self.stream.0.get(self.index) } } -impl<'t> Iterator for RefTokenTreeCursor<'t> { +impl<'t> Iterator for TokenStreamIter<'t> { type Item = &'t TokenTree; fn next(&mut self) -> Option<&'t TokenTree> { @@ -694,39 +695,6 @@ impl<'t> Iterator for RefTokenTreeCursor<'t> { } } -/// Owning by-value iterator over a [`TokenStream`], that produces `&TokenTree` -/// items. -/// -/// Doesn't impl `Iterator` because Rust doesn't permit an owning iterator to -/// return `&T` from `next`; the need for an explicit lifetime in the `Item` -/// associated type gets in the way. Instead, use `next_ref` (which doesn't -/// involve associated types) for getting individual elements, or -/// `RefTokenTreeCursor` if you really want an `Iterator`, e.g. in a `for` -/// loop. 
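The key API point in the `TokenStreamIter` definition above is the `peek` signature: it takes `&self` and returns `Option<&'t TokenTree>`, a borrow of the underlying *stream* rather than of the iterator. That is what lets call sites drop `iter::Peekable` entirely, and it works because the iterator is just an index into the stream. A minimal sketch of the same pattern, using illustrative names rather than the rustc types:

```rust
// Index-based by-reference iterator: `peek` borrows the storage, not `self`.
struct SliceIter<'t, T> {
    items: &'t [T],
    index: usize,
}

impl<'t, T> SliceIter<'t, T> {
    // Unlike `std::iter::Peekable::peek`, which takes `&mut self` and returns
    // a reference tied to the iterator, this returns a reference tied to the
    // underlying slice, so it stays valid across later `next` calls.
    fn peek(&self) -> Option<&'t T> {
        self.items.get(self.index)
    }
}

impl<'t, T> Iterator for SliceIter<'t, T> {
    type Item = &'t T;

    fn next(&mut self) -> Option<&'t T> {
        let item = self.items.get(self.index)?;
        self.index += 1;
        Some(item)
    }
}

fn main() {
    let data = [10, 20, 30];
    let mut it = SliceIter { items: &data, index: 0 };
    let first = it.peek().unwrap(); // this borrow survives the `next` calls below
    assert_eq!(it.next(), Some(&10));
    assert_eq!(it.next(), Some(&20));
    assert_eq!(*first, 10);
}
```

With `Peekable` the peeked reference cannot be held across a subsequent `next`, so every peeking call site previously had to thread a `Peekable` wrapper through its signature; the index-based design avoids both costs.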
-#[derive(Clone, Debug)] -pub struct TokenTreeCursor { - pub stream: TokenStream, - index: usize, -} - -impl TokenTreeCursor { - fn new(stream: TokenStream) -> Self { - TokenTreeCursor { stream, index: 0 } - } - - #[inline] - pub fn next_ref(&mut self) -> Option<&TokenTree> { - self.stream.0.get(self.index).map(|tree| { - self.index += 1; - tree - }) - } - - pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> { - self.stream.0.get(self.index + n) - } -} - #[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)] pub struct DelimSpan { pub open: Span, diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index cbb92c8c30f09..aa551d2f07d40 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -725,7 +725,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere // E.g. we have seen cases where a proc macro can handle `a :: b` but not // `a::b`. See #117433 for some examples. fn print_tts(&mut self, tts: &TokenStream, convert_dollar_crate: bool) { - let mut iter = tts.trees().peekable(); + let mut iter = tts.iter().peekable(); while let Some(tt) = iter.next() { let spacing = self.print_tt(tt, convert_dollar_crate); if let Some(next) = iter.peek() { diff --git a/compiler/rustc_builtin_macros/src/concat_idents.rs b/compiler/rustc_builtin_macros/src/concat_idents.rs index b459cc3007d37..532f625962718 100644 --- a/compiler/rustc_builtin_macros/src/concat_idents.rs +++ b/compiler/rustc_builtin_macros/src/concat_idents.rs @@ -19,7 +19,7 @@ pub(crate) fn expand_concat_idents<'cx>( } let mut res_str = String::new(); - for (i, e) in tts.trees().enumerate() { + for (i, e) in tts.iter().enumerate() { if i & 1 == 1 { match e { TokenTree::Token(Token { kind: token::Comma, .. 
}, _) => {} diff --git a/compiler/rustc_builtin_macros/src/trace_macros.rs b/compiler/rustc_builtin_macros/src/trace_macros.rs index e624d1da66bd5..01e4c403203be 100644 --- a/compiler/rustc_builtin_macros/src/trace_macros.rs +++ b/compiler/rustc_builtin_macros/src/trace_macros.rs @@ -10,9 +10,9 @@ pub(crate) fn expand_trace_macros( sp: Span, tt: TokenStream, ) -> MacroExpanderResult<'static> { - let mut cursor = tt.trees(); + let mut iter = tt.iter(); let mut err = false; - let value = match &cursor.next() { + let value = match iter.next() { Some(TokenTree::Token(token, _)) if token.is_keyword(kw::True) => true, Some(TokenTree::Token(token, _)) if token.is_keyword(kw::False) => false, _ => { @@ -20,7 +20,7 @@ pub(crate) fn expand_trace_macros( false } }; - err |= cursor.next().is_some(); + err |= iter.next().is_some(); if err { cx.dcx().emit_err(errors::TraceMacros { span: sp }); } else { diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs index 810a5d30c7ec4..b4453c3cd2622 100644 --- a/compiler/rustc_expand/src/mbe/metavar_expr.rs +++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs @@ -1,5 +1,5 @@ use rustc_ast::token::{self, Delimiter, IdentIsRaw, Lit, Token, TokenKind}; -use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree}; +use rustc_ast::tokenstream::{TokenStream, TokenStreamIter, TokenTree}; use rustc_ast::{LitIntType, LitKind}; use rustc_ast_pretty::pprust; use rustc_errors::{Applicability, PResult}; @@ -39,14 +39,14 @@ impl MetaVarExpr { outer_span: Span, psess: &'psess ParseSess, ) -> PResult<'psess, MetaVarExpr> { - let mut tts = input.trees(); - let ident = parse_ident(&mut tts, psess, outer_span)?; - let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = tts.next() else { + let mut iter = input.iter(); + let ident = parse_ident(&mut iter, psess, outer_span)?; + let Some(TokenTree::Delimited(.., Delimiter::Parenthesis, args)) = iter.next() else { let msg = "meta-variable expression parameter must be wrapped in parentheses"; return Err(psess.dcx().struct_span_err(ident.span, msg)); }; - check_trailing_token(&mut tts, psess)?; - let mut iter = args.trees(); + check_trailing_token(&mut iter, psess)?; + let mut iter = args.iter(); let rslt = match ident.as_str() { "concat" => { let mut result = Vec::new(); @@ -74,7 +74,7 @@ impl MetaVarExpr { } }; result.push(element); - if iter.look_ahead(0).is_none() { + if iter.peek().is_none() { break; } if !try_eat_comma(&mut iter) { @@ -143,7 +143,7 @@ pub(crate) enum MetaVarExprConcatElem { // Checks if there are any remaining tokens. For example, `${ignore(ident ... a b c ...)}` fn check_trailing_token<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, ) -> PResult<'psess, ()> { if let Some(tt) = iter.next() { @@ -159,14 +159,14 @@ fn check_trailing_token<'psess>( /// Parse a meta-variable `count` expression: `count(ident[, depth])` fn parse_count<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, span: Span, ) -> PResult<'psess, MetaVarExpr> { eat_dollar(iter, psess, span)?; let ident = parse_ident(iter, psess, span)?; let depth = if try_eat_comma(iter) { - if iter.look_ahead(0).is_none() { + if iter.peek().is_none() { return Err(psess.dcx().struct_span_err( span, "`count` followed by a comma must have an associated index indicating its depth", @@ -181,7 +181,7 @@ fn parse_count<'psess>( /// Parses the depth used by index(depth) and len(depth). 
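For context: the `MetaVarExpr` parser being reworked here backs the unstable `macro_metavar_expr` feature, and `parse_depth` (documented just above, continued below) reads the optional depth argument of `index`/`len`/`count`. A small nightly-only example of the surface syntax these functions accept — illustrative, not exhaustive:

```rust
// Requires a nightly toolchain; syntax as accepted by the parser shown here
// (note `count` takes a `$`-prefixed metavariable, matching `eat_dollar`).
#![feature(macro_metavar_expr)]

macro_rules! indexed {
    ($($x:ident),*) => {
        (
            ${count($x)},                          // total number of `$x` repetitions
            [$( (${index()}, stringify!($x)) ),*], // position of each repetition
        )
    };
}

fn main() {
    let (n, v) = indexed!(a, b, c);
    assert_eq!(n, 3);
    assert_eq!(v, [(0, "a"), (1, "b"), (2, "c")]);
}
```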
fn parse_depth<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, span: Span, ) -> PResult<'psess, usize> { @@ -204,7 +204,7 @@ fn parse_depth<'psess>( /// Parses an generic ident fn parse_ident<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, fallback_span: Span, ) -> PResult<'psess, Ident> { @@ -236,7 +236,7 @@ fn parse_ident_from_token<'psess>( } fn parse_token<'psess, 't>( - iter: &mut RefTokenTreeCursor<'t>, + iter: &mut TokenStreamIter<'t>, psess: &'psess ParseSess, fallback_span: Span, ) -> PResult<'psess, &'t Token> { @@ -251,8 +251,8 @@ fn parse_token<'psess, 't>( /// Tries to move the iterator forward returning `true` if there is a comma. If not, then the /// iterator is not modified and the result is `false`. -fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool { - if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.look_ahead(0) { +fn try_eat_comma(iter: &mut TokenStreamIter<'_>) -> bool { + if let Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) = iter.peek() { let _ = iter.next(); return true; } @@ -261,8 +261,8 @@ fn try_eat_comma(iter: &mut RefTokenTreeCursor<'_>) -> bool { /// Tries to move the iterator forward returning `true` if there is a dollar sign. If not, then the /// iterator is not modified and the result is `false`. -fn try_eat_dollar(iter: &mut RefTokenTreeCursor<'_>) -> bool { - if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) { +fn try_eat_dollar(iter: &mut TokenStreamIter<'_>) -> bool { + if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.peek() { let _ = iter.next(); return true; } @@ -271,12 +271,11 @@ fn try_eat_dollar(iter: &mut RefTokenTreeCursor<'_>) -> bool { /// Expects that the next item is a dollar sign. fn eat_dollar<'psess>( - iter: &mut RefTokenTreeCursor<'_>, + iter: &mut TokenStreamIter<'_>, psess: &'psess ParseSess, span: Span, ) -> PResult<'psess, ()> { - if let Some(TokenTree::Token(Token { kind: token::Dollar, .. }, _)) = iter.look_ahead(0) { - let _ = iter.next(); + if try_eat_dollar(iter) { return Ok(()); } Err(psess.dcx().struct_span_err( diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs index 36094707faceb..82f2dd33883a7 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -1,4 +1,5 @@ use rustc_ast::token::{self, Delimiter, IdentIsRaw, NonterminalKind, Token}; +use rustc_ast::tokenstream::TokenStreamIter; use rustc_ast::{NodeId, tokenstream}; use rustc_ast_pretty::pprust; use rustc_feature::Features; @@ -49,25 +50,25 @@ pub(super) fn parse( // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming // additional trees if need be. - let mut trees = input.trees().peekable(); - while let Some(tree) = trees.next() { + let mut iter = input.iter(); + while let Some(tree) = iter.next() { // Given the parsed tree, if there is a metavar and we are expecting matchers, actually // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`). 
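The `try_eat_comma`/`try_eat_dollar` helpers above follow a peek-then-advance pattern: consume the next tree only if it matches, otherwise leave the iterator untouched. A generic sketch of the same contract (illustrative, not the rustc API — rustc's version uses `TokenStreamIter::peek` rather than a clone-ahead, but the behaviour on a mismatch is the same):

```rust
// Advance `iter` past the next item only if it equals `want`.
fn try_eat<I>(iter: &mut I, want: &I::Item) -> bool
where
    I: Iterator + Clone,
    I::Item: PartialEq,
{
    // Probe a clone so that a mismatch leaves the caller's iterator unmoved.
    let mut ahead = iter.clone();
    if ahead.next().as_ref() == Some(want) {
        *iter = ahead;
        return true;
    }
    false
}

fn main() {
    let mut it = "a,b".chars();
    assert!(try_eat(&mut it, &'a'));  // matches: consumed
    assert!(!try_eat(&mut it, &'b')); // next is ',': not consumed
    assert_eq!(it.next(), Some(','));
}
```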
-        let tree = parse_tree(tree, &mut trees, parsing_patterns, sess, node_id, features, edition);
+        let tree = parse_tree(tree, &mut iter, parsing_patterns, sess, node_id, features, edition);
         match tree {
             TokenTree::MetaVar(start_sp, ident) if parsing_patterns => {
                 // Not consuming the next token immediately, as it may not be a colon
-                let span = match trees.peek() {
+                let span = match iter.peek() {
                     Some(&tokenstream::TokenTree::Token(
                         Token { kind: token::Colon, span: colon_span },
                         _,
                     )) => {
                         // Consume the colon first
-                        trees.next();
+                        iter.next();

                         // It's ok to consume the next tree no matter how,
                         // since if it's not a token then it will be an invalid declaration.
-                        match trees.next() {
+                        match iter.next() {
                             Some(tokenstream::TokenTree::Token(token, _)) => match token.ident() {
                                 Some((fragment, _)) => {
                                     let span = token.span.with_lo(start_sp.lo());
@@ -143,14 +144,14 @@ fn maybe_emit_macro_metavar_expr_concat_feature(features: &Features, sess: &Sess
 /// # Parameters
 ///
 /// - `tree`: the tree we wish to convert.
-/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to finish
+/// - `outer_iter`: an iterator over trees. We may need to read more tokens from it in order to finish
 ///   converting `tree`
 /// - `parsing_patterns`: same as [parse].
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
 /// - `features`: language features so we can do feature gating.
 fn parse_tree<'a>(
     tree: &'a tokenstream::TokenTree,
-    outer_trees: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
+    outer_iter: &mut TokenStreamIter<'a>,
     parsing_patterns: bool,
     sess: &Session,
     node_id: NodeId,
@@ -163,15 +164,16 @@ fn parse_tree<'a>(
         &tokenstream::TokenTree::Token(Token { kind: token::Dollar, span: dollar_span }, _) => {
             // FIXME: Handle `Invisible`-delimited groups in a more systematic way
             // during parsing.
-            let mut next = outer_trees.next();
-            let mut trees: Box<dyn Iterator<Item = &tokenstream::TokenTree>>;
-            match next {
+            let mut next = outer_iter.next();
+            let mut iter_storage;
+            let mut iter: &mut TokenStreamIter<'_> = match next {
                 Some(tokenstream::TokenTree::Delimited(.., delim, tts)) if delim.skip() => {
-                    trees = Box::new(tts.trees());
-                    next = trees.next();
+                    iter_storage = tts.iter();
+                    next = iter_storage.next();
+                    &mut iter_storage
                 }
-                _ => trees = Box::new(outer_trees),
-            }
+                _ => outer_iter,
+            };

             match next {
                 // `tree` is followed by a delimited set of token trees.
@@ -230,7 +232,7 @@ fn parse_tree<'a>(
                 let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
                 // Get the Kleene operator and optional separator
                 let (separator, kleene) =
-                    parse_sep_and_kleene_op(&mut trees, delim_span.entire(), sess);
+                    parse_sep_and_kleene_op(&mut iter, delim_span.entire(), sess);
                 // Count the number of captured "names" (i.e., named metavars)
                 let num_captures = if parsing_patterns { count_metavar_decls(&sequence) } else { 0 };
@@ -313,11 +315,11 @@ fn kleene_op(token: &Token) -> Option<KleeneOp> {
 /// - Ok(Ok((op, span))) if the next token tree is a KleeneOp
 /// - Ok(Err(tok, span)) if the next token tree is a token but not a KleeneOp
 /// - Err(span) if the next token tree is not a token
-fn parse_kleene_op<'a>(
-    input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
+fn parse_kleene_op(
+    iter: &mut TokenStreamIter<'_>,
     span: Span,
 ) -> Result<Result<(KleeneOp, Span), Token>, Span> {
-    match input.next() {
+    match iter.next() {
         Some(tokenstream::TokenTree::Token(token, _)) => match kleene_op(token) {
             Some(op) => Ok(Ok((op, token.span))),
             None => Ok(Err(token.clone())),
@@ -334,22 +336,22 @@ fn parse_kleene_op<'a>(
 /// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
 /// stream of tokens in an invocation of a macro.
 ///
-/// This function will take some input iterator `input` corresponding to `span` and a parsing
-/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene
+/// This function will take some input iterator `iter` corresponding to `span` and a parsing
+/// session `sess`. If the next one (or possibly two) tokens in `iter` correspond to a Kleene
 /// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
 /// error with the appropriate span is emitted to `sess` and a dummy value is returned.
-fn parse_sep_and_kleene_op<'a>(
-    input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
+fn parse_sep_and_kleene_op(
+    iter: &mut TokenStreamIter<'_>,
     span: Span,
     sess: &Session,
 ) -> (Option<Token>, KleeneToken) {
     // We basically look at two token trees here, denoted as #1 and #2 below
-    let span = match parse_kleene_op(input, span) {
+    let span = match parse_kleene_op(iter, span) {
         // #1 is a `?`, `+`, or `*` KleeneOp
         Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)),

         // #1 is a separator followed by #2, a KleeneOp
-        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
+        Ok(Err(token)) => match parse_kleene_op(iter, token.span) {
             // #2 is the `?` Kleene op, which does not take a separator (error)
             Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
                 // Error!
diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs
index 263df235b3ecc..bb201089fa54a 100644
--- a/compiler/rustc_expand/src/proc_macro_server.rs
+++ b/compiler/rustc_expand/src/proc_macro_server.rs
@@ -112,9 +112,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStream, Span, Symbol>>
                 let delimiter = pm::Delimiter::from_internal(delim);
diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs
index d43fe27aa0366..3b8bc66bab7d4 100644
--- a/compiler/rustc_lint/src/builtin.rs
+++ b/compiler/rustc_lint/src/builtin.rs
@@ -1829,7 +1829,7 @@ impl KeywordIdents {
     fn check_tokens(&mut self, cx: &EarlyContext<'_>, tokens: &TokenStream) {
         // Check if the preceding token is `$`, because we want to allow `$async`, etc.
         let mut prev_dollar = false;
-        for tt in tokens.trees() {
+        for tt in tokens.iter() {
             match tt {
                 // Only report non-raw idents.
                 TokenTree::Token(token, _) => {
diff --git a/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs b/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs
index 23f4f7289067d..ce280fef8b623 100644
--- a/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs
+++ b/compiler/rustc_lint/src/macro_expr_fragment_specifier_2024_migration.rs
@@ -84,7 +84,7 @@ impl Expr2024 {
         let mut prev_colon = false;
         let mut prev_identifier = false;
         let mut prev_dollar = false;
-        for tt in tokens.trees() {
+        for tt in tokens.iter() {
             debug!(
                 "check_tokens: {:?} - colon {prev_dollar} - ident {prev_identifier} - colon {prev_colon}",
                 tt
diff --git a/compiler/rustc_mir_dataflow/src/framework/visitor.rs b/compiler/rustc_mir_dataflow/src/framework/visitor.rs
index d18e9fa33f0c4..a03aecee7be12 100644
--- a/compiler/rustc_mir_dataflow/src/framework/visitor.rs
+++ b/compiler/rustc_mir_dataflow/src/framework/visitor.rs
@@ -35,7 +35,6 @@ where
 {
     fn visit_block_start(&mut self, _state: &A::Domain) {}

-    /// // njn: grep for "before", "primary", etc.
     /// Called after the "early" effect of the given statement is applied to `state`.
     fn visit_after_early_statement_effect(
         &mut self,
diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs
index a2136399b0c82..fe5df04deeb67 100644
--- a/compiler/rustc_parse/src/parser/expr.rs
+++ b/compiler/rustc_parse/src/parser/expr.rs
@@ -8,6 +8,7 @@ use ast::token::IdentIsRaw;
 use ast::{CoroutineKind, ForLoopKind, GenBlockKind, MatchKind, Pat, Path, PathSegment, Recovered};
 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
+use rustc_ast::tokenstream::TokenTree;
 use rustc_ast::util::case::Case;
 use rustc_ast::util::classify;
 use rustc_ast::util::parser::{AssocOp, ExprPrecedence, Fixity, prec_let_scrutinee_needs_par};
@@ -2393,7 +2394,8 @@ impl<'a> Parser<'a> {
         }

         if self.token == TokenKind::Semi
-            && matches!(self.token_cursor.stack.last(), Some((.., Delimiter::Parenthesis)))
+            && let Some(last) = self.token_cursor.stack.last()
+            && let Some(TokenTree::Delimited(_, _, Delimiter::Parenthesis, _)) = last.curr()
             && self.may_recover()
         {
             // It is likely that the closure body is a block but where the
diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs
index 976ffe608a2ff..b85968a555d5a 100644
--- a/compiler/rustc_parse/src/parser/mod.rs
+++ b/compiler/rustc_parse/src/parser/mod.rs
@@ -24,9 +24,7 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::{
     self, Delimiter, IdentIsRaw, InvisibleOrigin, MetaVarKind, Nonterminal, Token, TokenKind,
 };
-use rustc_ast::tokenstream::{
-    AttrsTarget, DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree, TokenTreeCursor,
-};
+use rustc_ast::tokenstream::{AttrsTarget, Spacing, TokenStream, TokenTree};
 use rustc_ast::util::case::Case;
 use rustc_ast::{
     self as ast, AnonConst, AttrArgs, AttrId, ByRef, Const, CoroutineKind, DUMMY_NODE_ID,
@@ -273,21 +271,48 @@ struct CaptureState {
     seen_attrs: IntervalSet<AttrId>,
 }

-/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
+#[derive(Clone, Debug)]
+struct TokenTreeCursor {
+    stream: TokenStream,
+    /// Points to the current token tree in the stream. In `TokenCursor::curr`,
+    /// this can be any token tree. In `TokenCursor::stack`, this is always a
+    /// `TokenTree::Delimited`.
+    index: usize,
+}
+
+impl TokenTreeCursor {
+    #[inline]
+    fn new(stream: TokenStream) -> Self {
+        TokenTreeCursor { stream, index: 0 }
+    }
+
+    #[inline]
+    fn curr(&self) -> Option<&TokenTree> {
+        self.stream.get(self.index)
+    }
+
+    #[inline]
+    fn bump(&mut self) {
+        self.index += 1;
+    }
+}
+
+/// A `TokenStream` cursor that produces `Token`s. It's a bit odd that
 /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
 /// use this type to emit them as a linear sequence. But a linear sequence is
 /// what the parser expects, for the most part.
 #[derive(Clone, Debug)]
 struct TokenCursor {
-    // Cursor for the current (innermost) token stream. The delimiters for this
-    // token stream are found in `self.stack.last()`; when that is `None` then
-    // we are in the outermost token stream which never has delimiters.
-    tree_cursor: TokenTreeCursor,
-
-    // Token streams surrounding the current one. The delimiters for stack[n]'s
-    // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
-    // because it's the outermost token stream which never has delimiters.
-    stack: Vec<(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)>,
+    // Cursor for the current (innermost) token stream. The index within the
+    // cursor can point to any token tree in the stream (or one past the end).
+    // The delimiters for this token stream are found in `self.stack.last()`;
+    // if that is `None` we are in the outermost token stream which never has
+    // delimiters.
+    curr: TokenTreeCursor,
+
+    // Token streams surrounding the current one. The index within each cursor
+    // always points to a `TokenTree::Delimited`.
+    stack: Vec<TokenTreeCursor>,
 }

 impl TokenCursor {
@@ -302,32 +327,33 @@ impl TokenCursor {
         // FIXME: we currently don't return `Delimiter::Invisible` open/close delims. To fix
         // #67062 we will need to, whereupon the `delim != Delimiter::Invisible` conditions
         // below can be removed.
-        if let Some(tree) = self.tree_cursor.next_ref() {
+        if let Some(tree) = self.curr.curr() {
             match tree {
                 &TokenTree::Token(ref token, spacing) => {
                     debug_assert!(!matches!(
                         token.kind,
                         token::OpenDelim(_) | token::CloseDelim(_)
                     ));
-                    return (token.clone(), spacing);
+                    let res = (token.clone(), spacing);
+                    self.curr.bump();
+                    return res;
                 }
                 &TokenTree::Delimited(sp, spacing, delim, ref tts) => {
-                    let trees = tts.clone().into_trees();
-                    self.stack.push((
-                        mem::replace(&mut self.tree_cursor, trees),
-                        sp,
-                        spacing,
-                        delim,
-                    ));
+                    let trees = TokenTreeCursor::new(tts.clone());
+                    self.stack.push(mem::replace(&mut self.curr, trees));
                     if !delim.skip() {
                         return (Token::new(token::OpenDelim(delim), sp.open), spacing.open);
                     }
                     // No open delimiter to return; continue on to the next iteration.
                 }
             };
-        } else if let Some((tree_cursor, span, spacing, delim)) = self.stack.pop() {
+        } else if let Some(parent) = self.stack.pop() {
             // We have exhausted this token stream. Move back to its parent token stream.
-            self.tree_cursor = tree_cursor;
+            let Some(&TokenTree::Delimited(span, spacing, delim, _)) = parent.curr() else {
+                panic!("parent should be Delimited")
+            };
+            self.curr = parent;
+            self.curr.bump(); // move past the `Delimited`
             if !delim.skip() {
                 return (Token::new(token::CloseDelim(delim), span.close), spacing.close);
             }
@@ -466,7 +492,7 @@ impl<'a> Parser<'a> {
             capture_cfg: false,
             restrictions: Restrictions::empty(),
             expected_tokens: Vec::new(),
-            token_cursor: TokenCursor { tree_cursor: stream.into_trees(), stack: Vec::new() },
+            token_cursor: TokenCursor { curr: TokenTreeCursor::new(stream), stack: Vec::new() },
             num_bump_calls: 0,
             break_last_token: 0,
             unmatched_angle_bracket_count: 0,
@@ -1192,7 +1218,7 @@ impl<'a> Parser<'a> {
         if dist == 1 {
             // The index is zero because the tree cursor's index always points
             // to the next token to be gotten.
-            match self.token_cursor.tree_cursor.look_ahead(0) {
+            match self.token_cursor.curr.curr() {
                 Some(tree) => {
                     // Indexing stayed within the current token tree.
                     match tree {
@@ -1202,12 +1228,13 @@ impl<'a> Parser<'a> {
                             return looker(&Token::new(token::OpenDelim(delim), dspan.open));
                         }
                     }
-                };
+                }
                 None => {
                     // The tree cursor lookahead went (one) past the end of the
                     // current token tree. Try to return a close delimiter.
-                    if let Some(&(_, span, _, delim)) = self.token_cursor.stack.last()
+                    if let Some(last) = self.token_cursor.stack.last()
+                        && let Some(&TokenTree::Delimited(span, _, delim, _)) = last.curr()
                         && !delim.skip()
                     {
                         // We are not in the outermost token stream, so we have
@@ -1399,9 +1426,10 @@ impl<'a> Parser<'a> {
     pub fn parse_token_tree(&mut self) -> TokenTree {
         match self.token.kind {
             token::OpenDelim(..) => {
-                // Grab the tokens within the delimiters.
-                let stream = self.token_cursor.tree_cursor.stream.clone();
-                let (_, span, spacing, delim) = *self.token_cursor.stack.last().unwrap();
+                // Clone the `TokenTree::Delimited` that we are currently
+                // within. That's what we are going to return.
+                let tree = self.token_cursor.stack.last().unwrap().curr().unwrap().clone();
+                debug_assert_matches!(tree, TokenTree::Delimited(..));

                 // Advance the token cursor through the entire delimited
                 // sequence. After getting the `OpenDelim` we are *within* the
@@ -1421,7 +1449,7 @@ impl<'a> Parser<'a> {

                 // Consume close delimiter
                 self.bump();
-                TokenTree::Delimited(span, spacing, delim, stream)
+                tree
             }
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
diff --git a/compiler/rustc_parse/src/parser/tests.rs b/compiler/rustc_parse/src/parser/tests.rs
index 1813960dad05f..da1ff8f1273ca 100644
--- a/compiler/rustc_parse/src/parser/tests.rs
+++ b/compiler/rustc_parse/src/parser/tests.rs
@@ -2285,7 +2285,7 @@ fn bad_path_expr_1() {
 fn string_to_tts_macro() {
     create_default_session_globals_then(|| {
         let stream = string_to_stream("macro_rules! zip (($a)=>($a))".to_string());
-        let tts = &stream.trees().collect::<Vec<_>>()[..];
+        let tts = &stream.iter().collect::<Vec<_>>()[..];

         match tts {
             [
@@ -2297,14 +2297,14 @@ fn string_to_tts_macro() {
                 TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
                 TokenTree::Delimited(.., macro_delim, macro_tts),
             ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
-                let tts = &macro_tts.trees().collect::<Vec<_>>();
+                let tts = &macro_tts.iter().collect::<Vec<_>>();
                 match &tts[..] {
                     [
                         TokenTree::Delimited(.., first_delim, first_tts),
                         TokenTree::Token(Token { kind: token::FatArrow, .. }, _),
                         TokenTree::Delimited(.., second_delim, second_tts),
                     ] if macro_delim == &Delimiter::Parenthesis => {
-                        let tts = &first_tts.trees().collect::<Vec<_>>();
+                        let tts = &first_tts.iter().collect::<Vec<_>>();
                         match &tts[..] {
                             [
                                 TokenTree::Token(Token { kind: token::Dollar, .. }, _),
@@ -2316,7 +2316,7 @@ fn string_to_tts_macro() {
                             }
                             _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                         }
-                        let tts = &second_tts.trees().collect::<Vec<_>>();
+                        let tts = &second_tts.iter().collect::<Vec<_>>();
                         match &tts[..] {
                             [
                                 TokenTree::Token(Token { kind: token::Dollar, .. }, _),
@@ -2544,7 +2544,7 @@ fn ttdelim_span() {
         .unwrap();

     let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
-    let span = mac.args.tokens.trees().last().unwrap().span();
+    let span = mac.args.tokens.iter().last().unwrap().span();

     match psess.source_map().span_to_snippet(span) {
         Ok(s) => assert_eq!(&s[..], "{ body }"),
diff --git a/compiler/rustc_parse/src/parser/tokenstream/tests.rs b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
index b13b68c266a0e..037b5b1a9de5d 100644
--- a/compiler/rustc_parse/src/parser/tokenstream/tests.rs
+++ b/compiler/rustc_parse/src/parser/tokenstream/tests.rs
@@ -23,8 +23,8 @@ fn test_concat() {
         let mut eq_res = TokenStream::default();
         eq_res.push_stream(test_fst);
         eq_res.push_stream(test_snd);
-        assert_eq!(test_res.trees().count(), 5);
-        assert_eq!(eq_res.trees().count(), 5);
+        assert_eq!(test_res.iter().count(), 5);
+        assert_eq!(eq_res.iter().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
     })
 }
@@ -33,7 +33,7 @@ fn test_to_from_bijection() {
     create_default_session_globals_then(|| {
         let test_start = string_to_ts("foo::bar(baz)");
-        let test_end = test_start.trees().cloned().collect();
+        let test_end = test_start.iter().cloned().collect();
         assert_eq!(test_start, test_end)
     })
 }
@@ -105,6 +105,6 @@ fn test_dotdotdot() {
         stream.push_tree(TokenTree::token_joint(token::Dot, sp(1, 2)));
         stream.push_tree(TokenTree::token_alone(token::Dot, sp(2, 3)));
         assert!(stream.eq_unspanned(&string_to_ts("...")));
-        assert_eq!(stream.trees().count(), 1);
+        assert_eq!(stream.iter().count(), 1);
     })
 }
diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs
index 9903d0faf4382..e6cb1165c0632 100644
--- a/src/librustdoc/clean/mod.rs
+++ b/src/librustdoc/clean/mod.rs
@@ -2604,7 +2604,7 @@ fn filter_tokens_from_list(
 ) -> Vec<TokenTree> {
     let mut tokens = Vec::with_capacity(args_tokens.len());
     let mut skip_next_comma = false;
-    for token in args_tokens.trees() {
+    for token in args_tokens.iter() {
         match token {
             TokenTree::Token(Token { kind: TokenKind::Comma, .. }, _) if skip_next_comma => {
                 skip_next_comma = false;
diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs
index d39ecf83ac01c..3cc5f8d615a4e 100644
--- a/src/librustdoc/clean/render_macro_matchers.rs
+++ b/src/librustdoc/clean/render_macro_matchers.rs
@@ -131,7 +131,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
     use State::*;

     let mut state = Start;
-    for tt in tts.trees() {
+    for tt in tts.iter() {
         let (needs_space, next_state) = match &tt {
             TokenTree::Token(tt, _) => match (state, &tt.kind) {
                 (Dollar, token::Ident(..)) => (false, DollarIdent),
diff --git a/src/tools/clippy/clippy_lints/src/attrs/should_panic_without_expect.rs b/src/tools/clippy/clippy_lints/src/attrs/should_panic_without_expect.rs
index b4ed8a68a325a..fd27e30a67f3b 100644
--- a/src/tools/clippy/clippy_lints/src/attrs/should_panic_without_expect.rs
+++ b/src/tools/clippy/clippy_lints/src/attrs/should_panic_without_expect.rs
@@ -15,7 +15,7 @@ pub(super) fn check(cx: &EarlyContext<'_>, attr: &Attribute) {
     }

     if let AttrArgs::Delimited(args) = &normal_attr.item.args
-        && let mut tt_iter = args.tokens.trees()
+        && let mut tt_iter = args.tokens.iter()
         && let Some(TokenTree::Token(
             Token {
                 kind: TokenKind::Ident(sym::expected, _),
diff --git a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
index c8f814137289c..7d86bd3e540a1 100644
--- a/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
+++ b/src/tools/clippy/clippy_lints/src/crate_in_macro_def.rs
@@ -82,11 +82,11 @@ fn is_macro_export(attr: &Attribute) -> bool {

 fn contains_unhygienic_crate_reference(tts: &TokenStream) -> Option<Span> {
     let mut prev_is_dollar = false;
-    let mut cursor = tts.trees();
-    while let Some(curr) = cursor.next() {
+    let mut iter = tts.iter();
+    while let Some(curr) = iter.next() {
         if !prev_is_dollar
             && let Some(span) = is_crate_keyword(curr)
-            && let Some(next) = cursor.look_ahead(0)
+            && let Some(next) = iter.peek()
             && is_token(next, &TokenKind::PathSep)
         {
             return Some(span);
diff --git a/src/tools/rustfmt/src/macros.rs b/src/tools/rustfmt/src/macros.rs
index 4083d9398f6bc..ea8ca38cb7725 100644
--- a/src/tools/rustfmt/src/macros.rs
+++ b/src/tools/rustfmt/src/macros.rs
@@ -13,7 +13,7 @@ use std::collections::HashMap;
 use std::panic::{AssertUnwindSafe, catch_unwind};

 use rustc_ast::token::{BinOpToken, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{TokenStream, TokenStreamIter, TokenTree};
 use rustc_ast::{ast, ptr};
 use rustc_ast_pretty::pprust;
 use rustc_span::{
@@ -443,7 +443,7 @@ pub(crate) fn rewrite_macro_def(
     }

     let ts = def.body.tokens.clone();
-    let mut parser = MacroParser::new(ts.trees());
+    let mut parser = MacroParser::new(ts.iter());
     let parsed_def = match parser.parse() {
         Some(def) => def,
         None => return snippet,
@@ -794,7 +794,7 @@ impl MacroArgParser {
         self.buf.clear();
     }

-    fn add_meta_variable(&mut self, iter: &mut RefTokenTreeCursor<'_>) -> Option<()> {
+    fn add_meta_variable(&mut self, iter: &mut TokenStreamIter<'_>) -> Option<()> {
         match iter.next() {
             Some(&TokenTree::Token(
                 Token {
@@ -826,7 +826,7 @@ impl MacroArgParser {
         &mut self,
         inner: Vec<ParsedMacroArg>,
         delim: Delimiter,
-        iter: &mut RefTokenTreeCursor<'_>,
+        iter: &mut TokenStreamIter<'_>,
     ) -> Option<()> {
         let mut buffer = String::new();
         let mut first = true;
@@ -926,7 +926,7 @@ impl MacroArgParser {

     /// Returns a collection of parsed macro def's arguments.
     fn parse(mut self, tokens: TokenStream) -> Option<Vec<ParsedMacroArg>> {
-        let mut iter = tokens.trees();
+        let mut iter = tokens.iter();

         while let Some(tok) = iter.next() {
             match tok {
@@ -1063,7 +1063,7 @@ fn format_macro_args(
 }

 fn span_for_token_stream(token_stream: &TokenStream) -> Option<Span> {
-    token_stream.trees().next().map(|tt| tt.span())
+    token_stream.iter().next().map(|tt| tt.span())
 }

 // We should insert a space if the next token is a:
@@ -1179,18 +1179,18 @@ pub(crate) fn macro_style(mac: &ast::MacCall, context: &RewriteContext<'_>) -> D
 // A very simple parser that just parses a macros 2.0 definition into its branches.
 // Currently we do not attempt to parse any further than that.
 struct MacroParser<'a> {
-    toks: RefTokenTreeCursor<'a>,
+    iter: TokenStreamIter<'a>,
 }

 impl<'a> MacroParser<'a> {
-    const fn new(toks: RefTokenTreeCursor<'a>) -> Self {
-        Self { toks }
+    const fn new(iter: TokenStreamIter<'a>) -> Self {
+        Self { iter }
     }

     // (`(` ... `)` `=>` `{` ... `}`)*
     fn parse(&mut self) -> Option<Macro> {
         let mut branches = vec![];
-        while self.toks.look_ahead(1).is_some() {
+        while self.iter.peek().is_some() {
             branches.push(self.parse_branch()?);
         }

@@ -1199,13 +1199,13 @@ impl<'a> MacroParser<'a> {

     // `(` ... `)` `=>` `{` ... `}`
     fn parse_branch(&mut self) -> Option<MacroBranch> {
-        let tok = self.toks.next()?;
+        let tok = self.iter.next()?;
         let (lo, args_paren_kind) = match tok {
             TokenTree::Token(..) => return None,
             &TokenTree::Delimited(delimited_span, _, d, _) => (delimited_span.open.lo(), d),
         };
         let args = TokenStream::new(vec![tok.clone()]);
-        match self.toks.next()? {
+        match self.iter.next()? {
             TokenTree::Token(
                 Token {
                     kind: TokenKind::FatArrow,
@@ -1215,7 +1215,7 @@ impl<'a> MacroParser<'a> {
             ) => {}
             _ => return None,
         }
-        let (mut hi, body, whole_body) = match self.toks.next()? {
+        let (mut hi, body, whole_body) = match self.iter.next()? {
             TokenTree::Token(..) => return None,
             TokenTree::Delimited(delimited_span, ..) => {
                 let data = delimited_span.entire().data();
@@ -1237,10 +1237,10 @@ impl<'a> MacroParser<'a> {
                 span,
             },
             _,
-        )) = self.toks.look_ahead(0)
+        )) = self.iter.peek()
         {
             hi = span.hi();
-            self.toks.next();
+            self.iter.next();
         }
         Some(MacroBranch {
             span: mk_sp(lo, hi),
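Taken together, the `rustc_parse` changes above replace the old `(TokenTreeCursor, DelimSpan, DelimSpacing, Delimiter)` stack entries with plain `TokenTreeCursor`s that stay parked on the `TokenTree::Delimited` being traversed, so the close delimiter's span and spacing are re-read from `parent.curr()` when the stack is popped. A rough, self-contained model of that traversal — simplified types for illustration, not rustc's:

```rust
enum Tree {
    Leaf(char),
    Group(Vec<Tree>),
}

struct Cursor<'a> {
    trees: &'a [Tree],
    index: usize,
}

struct Walker<'a> {
    curr: Cursor<'a>,
    stack: Vec<Cursor<'a>>, // each entry stays parked on a `Group`
}

impl<'a> Walker<'a> {
    fn next(&mut self) -> Option<char> {
        if let Some(tree) = self.curr.trees.get(self.curr.index) {
            match tree {
                Tree::Leaf(c) => {
                    self.curr.index += 1;
                    Some(*c)
                }
                Tree::Group(inner) => {
                    // Park the parent on the `Group` and descend into it,
                    // emitting the open delimiter.
                    let child = Cursor { trees: inner, index: 0 };
                    self.stack.push(std::mem::replace(&mut self.curr, child));
                    Some('(')
                }
            }
        } else if let Some(parent) = self.stack.pop() {
            // The parent still points at the `Group` we just finished; this
            // is the moment rustc recovers the close delimiter's span. Only
            // after that does the cursor move past the `Group`.
            self.curr = parent;
            self.curr.index += 1;
            Some(')')
        } else {
            None
        }
    }
}

fn main() {
    let ts = vec![
        Tree::Leaf('a'),
        Tree::Group(vec![Tree::Leaf('b'), Tree::Leaf('c')]),
        Tree::Leaf('d'),
    ];
    let mut w = Walker { curr: Cursor { trees: &ts, index: 0 }, stack: Vec::new() };
    let mut out = String::new();
    while let Some(c) = w.next() {
        out.push(c);
    }
    assert_eq!(out, "a(bc)d");
}
```

This also mirrors why `parse_token_tree` can now simply clone `stack.last().curr()`: while the parser is inside a delimited sequence, the whole `Delimited` tree remains addressable from the parent cursor, so nothing (span, spacing, delimiter, or stream) needs to be stored separately on the stack.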