
parser: Remove Deref impl from Parser #61616


Merged: 3 commits, Jun 8, 2019
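This change removes the `Deref` impl from `Parser`, so call sites stop relying on auto-deref (`parser.span`, `self.span`) and name the field explicitly (`parser.token.span`, `self.token.span`). As context, here is a minimal, self-contained sketch of the pattern being dropped, assuming the impl derefed to the parser's current token, which is what the rewritten call sites below suggest; `Span`, `Token`, and `Parser` are simplified stand-ins, not the real libsyntax definitions.

```rust
use std::ops::Deref;

// Simplified stand-ins for illustration only, not the actual libsyntax types.
#[derive(Clone, Copy, Debug)]
struct Span(u32);

struct Token {
    span: Span,
}

struct Parser {
    token: Token,
}

// The pattern this PR removes: derefing the parser to its current token,
// which let call sites write `parser.span` instead of `parser.token.span`.
impl Deref for Parser {
    type Target = Token;
    fn deref(&self) -> &Token {
        &self.token
    }
}

fn main() {
    let parser = Parser { token: Token { span: Span(7) } };
    println!("via Deref (old style): {:?}", parser.span);
    println!("explicit (new style):  {:?}", parser.token.span);
}
```

With the impl gone, only the explicit form compiles, which is the mechanical rewrite the hunks below perform.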
4 changes: 2 additions & 2 deletions src/libsyntax/attr/mod.rs
@@ -735,9 +735,9 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -
raw_attr.clone(),
);

-let start_span = parser.span;
+let start_span = parser.token.span;
let (path, tokens) = panictry!(parser.parse_meta_item_unrestricted());
-let end_span = parser.span;
+let end_span = parser.token.span;
if parser.token != token::Eof {
parse_sess.span_diagnostic
.span_err(start_span.to(end_span), "invalid crate attribute");
2 changes: 1 addition & 1 deletion src/libsyntax/config.rs
@@ -121,7 +121,7 @@ impl<'a> StripUnconfigured<'a> {
let mut expanded_attrs = Vec::with_capacity(1);

while !parser.check(&token::CloseDelim(token::Paren)) {
-let lo = parser.span.lo();
+let lo = parser.token.span.lo();
let (path, tokens) = parser.parse_meta_item_unrestricted()?;
expanded_attrs.push((path, tokens, parser.prev_span.with_lo(lo)));
parser.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?;
2 changes: 1 addition & 1 deletion src/libsyntax/ext/expand.rs
@@ -1041,7 +1041,7 @@ impl<'a> Parser<'a> {
let msg = format!("macro expansion ignores token `{}` and any following",
self.this_token_to_string());
// Avoid emitting backtrace info twice.
-let def_site_span = self.span.with_ctxt(SyntaxContext::empty());
+let def_site_span = self.token.span.with_ctxt(SyntaxContext::empty());
let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
err.span_label(span, "caused by the macro expansion here");
let msg = format!(
2 changes: 1 addition & 1 deletion src/libsyntax/ext/source_util.rs
@@ -105,7 +105,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstrea
while self.p.token != token::Eof {
match panictry!(self.p.parse_item()) {
Some(item) => ret.push(item),
-None => self.p.diagnostic().span_fatal(self.p.span,
+None => self.p.diagnostic().span_fatal(self.p.token.span,
&format!("expected item, found `{}`",
self.p.this_token_to_string()))
.raise()
14 changes: 7 additions & 7 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -675,7 +675,7 @@ pub fn parse(
//
// This MatcherPos instance is allocated on the stack. All others -- and
// there are frequently *no* others! -- are allocated on the heap.
-let mut initial = initial_matcher_pos(ms, parser.span);
+let mut initial = initial_matcher_pos(ms, parser.token.span);
let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
let mut next_items = Vec::new();

@@ -721,15 +721,15 @@ pub fn parse(
return nameize(sess, ms, matches);
} else if eof_items.len() > 1 {
return Error(
-parser.span,
+parser.token.span,
"ambiguity: multiple successful parses".to_string(),
);
} else {
return Failure(
-Token::new(token::Eof, if parser.span.is_dummy() {
-parser.span
+Token::new(token::Eof, if parser.token.span.is_dummy() {
+parser.token.span
} else {
-sess.source_map().next_point(parser.span)
+sess.source_map().next_point(parser.token.span)
}),
"missing tokens in macro arguments",
);
@@ -753,7 +753,7 @@ pub fn parse(
.join(" or ");

return Error(
-parser.span,
+parser.token.span,
format!(
"local ambiguity: multiple parsing options: {}",
match next_items.len() {
@@ -927,7 +927,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal {
sym::ty => token::NtTy(panictry!(p.parse_ty())),
// this could be handled like a token, since it is one
sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) {
-let span = p.span;
+let span = p.token.span;
p.bump();
token::NtIdent(Ident::new(name, span), is_raw)
} else {
4 changes: 2 additions & 2 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -47,7 +47,7 @@ impl<'a> ParserAnyMacro<'a> {
let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| {
if parser.token == token::Eof && e.message().ends_with(", found `<eof>`") {
if !e.span.is_dummy() { // early end of macro arm (#52866)
-e.replace_span_with(parser.sess.source_map().next_point(parser.span));
+e.replace_span_with(parser.sess.source_map().next_point(parser.token.span));
}
let msg = &e.message[0];
e.message[0] = (
@@ -63,7 +63,7 @@ impl<'a> ParserAnyMacro<'a> {
if parser.sess.source_map().span_to_filename(arm_span).is_real() {
e.span_label(arm_span, "in this macro arm");
}
-} else if !parser.sess.source_map().span_to_filename(parser.span).is_real() {
+} else if !parser.sess.source_map().span_to_filename(parser.token.span).is_real() {
e.span_label(site_span, "in this macro invocation");
}
e
12 changes: 6 additions & 6 deletions src/libsyntax/parse/attr.rs
@@ -39,7 +39,7 @@ impl<'a> Parser<'a> {
just_parsed_doc_comment = false;
}
token::DocComment(s) => {
-let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
+let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span);
if attr.style != ast::AttrStyle::Outer {
let mut err = self.fatal("expected outer doc comment");
err.note("inner doc comments like this (starting with \
@@ -83,7 +83,7 @@ impl<'a> Parser<'a> {
self.token);
let (span, path, tokens, style) = match self.token.kind {
token::Pound => {
-let lo = self.span;
+let lo = self.token.span;
self.bump();

if let InnerAttributeParsePolicy::Permitted = inner_parse_policy {
@@ -93,7 +93,7 @@ impl<'a> Parser<'a> {
self.bump();
if let InnerAttributeParsePolicy::NotPermitted { reason } = inner_parse_policy
{
-let span = self.span;
+let span = self.token.span;
self.diagnostic()
.struct_span_err(span, reason)
.note("inner attributes, like `#![no_std]`, annotate the item \
@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
}
token::DocComment(s) => {
// we need to get the position of this token before we bump.
-let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span);
+let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span);
if attr.style == ast::AttrStyle::Inner {
attrs.push(attr);
self.bump();
@@ -249,7 +249,7 @@ impl<'a> Parser<'a> {
return Ok(meta);
}

-let lo = self.span;
+let lo = self.token.span;
let path = self.parse_path(PathStyle::Mod)?;
let node = self.parse_meta_item_kind()?;
let span = lo.to(self.prev_span);
@@ -284,7 +284,7 @@ impl<'a> Parser<'a> {

let found = self.this_token_to_string();
let msg = format!("expected unsuffixed literal or identifier, found `{}`", found);
-Err(self.diagnostic().struct_span_err(self.span, &msg))
+Err(self.diagnostic().struct_span_err(self.token.span, &msg))
}

/// matches meta_seq = ( COMMASEP(meta_item_inner) )
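As a usage note, here is a sketch of what a rewritten call site looks like after this change, mirroring the shape of the hunks above (for example the one in src/libsyntax/parse/attr.rs); the types and the attribute_start helper are illustrative assumptions, not the libsyntax API.

```rust
// Illustrative stand-ins, not the real libsyntax definitions.
#[derive(Clone, Copy, Debug)]
struct Span {
    lo: u32,
}

enum TokenKind {
    Pound,
    Eof,
}

struct Token {
    kind: TokenKind,
    span: Span,
}

struct Parser {
    token: Token,
}

impl Parser {
    // With the Deref impl removed, the span is read through the same
    // `token` field that the kind check already names.
    fn attribute_start(&self) -> Option<Span> {
        match self.token.kind {
            TokenKind::Pound => Some(self.token.span),
            _ => None,
        }
    }
}

fn main() {
    let at_pound = Parser { token: Token { kind: TokenKind::Pound, span: Span { lo: 3 } } };
    let at_eof = Parser { token: Token { kind: TokenKind::Eof, span: Span { lo: 9 } } };
    println!("{:?}", at_pound.attribute_start());
    println!("{:?}", at_eof.attribute_start());
}
```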