Add newtype for raw idents

clubby789 2024-02-13 23:28:27 +00:00
parent cce6a6e22e
commit 06d6c62f80
24 changed files with 148 additions and 87 deletions
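The change replaces the bare bool "is raw" flag on identifier tokens with a dedicated two-variant enum, so call sites name the intent (IdentIsRaw::No / IdentIsRaw::Yes) instead of passing an unlabelled true/false, while the From impls keep bool-based boundaries such as the proc-macro bridge working through .into(). A minimal, self-contained sketch of the pattern follows; the simplified TokenKind stand-in and the is_bool_lit helper are illustrative assumptions, not the real rustc types.

// Whether an identifier was written with the r# raw prefix.
#[derive(PartialEq, Debug, Copy, Clone)]
pub enum IdentIsRaw {
    No,
    Yes,
}

// Conversions to and from bool keep existing bool-based interfaces
// (e.g. the proc-macro bridge) working through .into().
impl From<bool> for IdentIsRaw {
    fn from(b: bool) -> Self {
        if b { Self::Yes } else { Self::No }
    }
}

impl From<IdentIsRaw> for bool {
    fn from(is_raw: IdentIsRaw) -> bool {
        matches!(is_raw, IdentIsRaw::Yes)
    }
}

// Simplified stand-in for rustc_ast::token::TokenKind (an assumption for
// illustration only, not the real type).
pub enum TokenKind {
    Ident(String, IdentIsRaw),
    Literal(String),
}

// Before this change a call site matched Ident(name, false); now the variant
// is named at the match site.
fn is_bool_lit(kind: &TokenKind) -> bool {
    match kind {
        TokenKind::Ident(name, IdentIsRaw::No) => name == "true" || name == "false",
        _ => false,
    }
}

fn main() {
    // From<bool> at a boundary that still hands us a bool.
    let t = TokenKind::Ident("true".to_string(), false.into());
    assert!(is_bool_lit(&t));
    // A raw identifier (r#true) is not a bool literal.
    let raw = TokenKind::Ident("true".to_string(), IdentIsRaw::Yes);
    assert!(!is_bool_lit(&raw));
}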

View File

@ -107,7 +107,7 @@ impl Lit {
/// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation.
pub fn from_token(token: &Token) -> Option<Lit> {
match token.uninterpolate().kind {
Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)),
Literal(token_lit) => Some(token_lit),
Interpolated(ref nt)
if let NtExpr(expr) | NtLiteral(expr) = &nt.0
@ -183,7 +183,7 @@ impl LitKind {
}
}
pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool {
pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool {
let ident_token = Token::new(Ident(name, is_raw), span);
!ident_token.is_reserved_ident()
@ -214,7 +214,7 @@ pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool {
.contains(&name)
}
fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool {
let ident_token = Token::new(Ident(name, is_raw), span);
!ident_token.is_reserved_ident()
@ -223,6 +223,24 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool {
.contains(&name)
}
#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)]
pub enum IdentIsRaw {
No,
Yes,
}
impl From<bool> for IdentIsRaw {
fn from(b: bool) -> Self {
if b { Self::Yes } else { Self::No }
}
}
impl From<IdentIsRaw> for bool {
fn from(is_raw: IdentIsRaw) -> bool {
matches!(is_raw, IdentIsRaw::Yes)
}
}
// SAFETY: due to the `Clone` impl below, all fields of all variants other than
// `Interpolated` must impl `Copy`.
#[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)]
@ -298,7 +316,7 @@ pub enum TokenKind {
/// Do not forget about `NtIdent` when you want to match on identifiers.
/// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to
/// treat regular and interpolated identifiers in the same way.
Ident(Symbol, /* is_raw */ bool),
Ident(Symbol, IdentIsRaw),
/// Lifetime identifier token.
/// Do not forget about `NtLifetime` when you want to match on lifetime identifiers.
/// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to
@ -411,7 +429,7 @@ impl Token {
/// Recovers a `Token` from an `Ident`. This creates a raw identifier if necessary.
pub fn from_ast_ident(ident: Ident) -> Self {
Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span)
Token::new(Ident(ident.name, ident.is_raw_guess().into()), ident.span)
}
/// For interpolated tokens, returns a span of the fragment to which the interpolated
@ -567,7 +585,7 @@ impl Token {
pub fn can_begin_literal_maybe_minus(&self) -> bool {
match self.uninterpolate().kind {
Literal(..) | BinOp(Minus) => true,
Ident(name, false) if name.is_bool_lit() => true,
Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true,
Interpolated(ref nt) => match &nt.0 {
NtLiteral(_) => true,
NtExpr(e) => match &e.kind {
@ -602,7 +620,7 @@ impl Token {
/// Returns an identifier if this token is an identifier.
#[inline]
pub fn ident(&self) -> Option<(Ident, /* is_raw */ bool)> {
pub fn ident(&self) -> Option<(Ident, IdentIsRaw)> {
// We avoid using `Token::uninterpolate` here because it's slow.
match &self.kind {
&Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)),
@ -755,7 +773,7 @@ impl Token {
/// Returns `true` if the token is a non-raw identifier for which `pred` holds.
pub fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool {
match self.ident() {
Some((id, false)) => pred(id),
Some((id, IdentIsRaw::No)) => pred(id),
_ => false,
}
}
@ -806,7 +824,7 @@ impl Token {
_ => return None,
},
SingleQuote => match joint.kind {
Ident(name, false) => Lifetime(Symbol::intern(&format!("'{name}"))),
Ident(name, IdentIsRaw::No) => Lifetime(Symbol::intern(&format!("'{name}"))),
_ => return None,
},
@ -836,7 +854,7 @@ pub enum Nonterminal {
NtPat(P<ast::Pat>),
NtExpr(P<ast::Expr>),
NtTy(P<ast::Ty>),
NtIdent(Ident, /* is_raw */ bool),
NtIdent(Ident, IdentIsRaw),
NtLifetime(Ident),
NtLiteral(P<ast::Expr>),
/// Stuff inside brackets for attributes

View File

@ -656,7 +656,7 @@ impl TokenStream {
DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
Delimiter::Bracket,
[
TokenTree::token_alone(token::Ident(sym::doc, false), span),
TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span),
TokenTree::token_alone(token::Eq, span),
TokenTree::token_alone(
TokenKind::lit(token::StrRaw(num_of_hashes), data, None),

View File

@ -185,7 +185,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
// IDENT + `!`: `println!()`, but `if !x { ... }` needs a space after the `if`
(Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Not, .. }, _))
if !Ident::new(*sym, *span).is_reserved() || *is_raw =>
if !Ident::new(*sym, *span).is_reserved() || matches!(is_raw, IdentIsRaw::Yes) =>
{
false
}
@ -197,7 +197,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
|| *sym == kw::Fn
|| *sym == kw::SelfUpper
|| *sym == kw::Pub
|| *is_raw =>
|| matches!(is_raw, IdentIsRaw::Yes) =>
{
false
}
@ -731,7 +731,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
token::NtBlock(e) => self.block_to_string(e),
token::NtStmt(e) => self.stmt_to_string(e),
token::NtPat(e) => self.pat_to_string(e),
token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(*e, *is_raw).to_string(),
&token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw.into()).to_string(),
token::NtLifetime(e) => e.to_string(),
token::NtLiteral(e) => self.expr_to_string(e),
token::NtVis(e) => self.vis_to_string(e),
@ -795,7 +795,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
/* Name components */
token::Ident(s, is_raw) => {
IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string().into()
IdentPrinter::new(s, is_raw.into(), convert_dollar_crate).to_string().into()
}
token::Lifetime(s) => s.to_string().into(),

View File

@ -1,3 +1,4 @@
use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter};
@ -416,7 +417,7 @@ fn parse_reg<'a>(
) -> PResult<'a, ast::InlineAsmRegOrRegClass> {
p.expect(&token::OpenDelim(Delimiter::Parenthesis))?;
let result = match p.token.uninterpolate().kind {
token::Ident(name, false) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
*explicit_reg = true;
ast::InlineAsmRegOrRegClass::Reg(symbol)

View File

@ -1,7 +1,6 @@
use rustc_ast::{
ptr::P,
token,
token::Delimiter,
token::{self, Delimiter, IdentIsRaw},
tokenstream::{DelimSpan, TokenStream, TokenTree},
BinOpKind, BorrowKind, DelimArgs, Expr, ExprKind, ItemKind, MacCall, MethodCall, Mutability,
Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID,
@ -170,7 +169,10 @@ impl<'cx, 'a> Context<'cx, 'a> {
];
let captures = self.capture_decls.iter().flat_map(|cap| {
[
TokenTree::token_joint_hidden(token::Ident(cap.ident.name, false), cap.ident.span),
TokenTree::token_joint_hidden(
token::Ident(cap.ident.name, IdentIsRaw::No),
cap.ident.span,
),
TokenTree::token_alone(token::Comma, self.span),
]
});

View File

@ -107,7 +107,7 @@
use crate::errors;
use crate::mbe::{KleeneToken, TokenTree};
use rustc_ast::token::{Delimiter, Token, TokenKind};
use rustc_ast::token::{Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::{NodeId, DUMMY_NODE_ID};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{DiagnosticMessage, MultiSpan};
@ -409,7 +409,7 @@ fn check_nested_occurrences(
match (state, tt) {
(
NestedMacroState::Empty,
&TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }),
&TokenTree::Token(Token { kind: TokenKind::Ident(name, IdentIsRaw::No), .. }),
) => {
if name == kw::MacroRules {
state = NestedMacroState::MacroRules;

View File

@ -8,6 +8,7 @@ use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser}
use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
use crate::mbe::transcribe::transcribe;
use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*};
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
@ -1302,7 +1303,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
match tok {
TokenTree::Token(token) => match token.kind {
FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
IsInFollow::Yes
}
_ => IsInFollow::No(TOKENS),
},
_ => IsInFollow::No(TOKENS),
@ -1313,7 +1316,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
match tok {
TokenTree::Token(token) => match token.kind {
FatArrow | Comma | Eq => IsInFollow::Yes,
Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes,
Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
IsInFollow::Yes
}
_ => IsInFollow::No(TOKENS),
},
_ => IsInFollow::No(TOKENS),
@ -1336,7 +1341,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
| BinOp(token::Shr)
| Semi
| BinOp(token::Or) => IsInFollow::Yes,
Ident(name, false) if name == kw::As || name == kw::Where => {
Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
IsInFollow::Yes
}
_ => IsInFollow::No(TOKENS),
@ -1364,7 +1369,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
match tok {
TokenTree::Token(token) => match token.kind {
Comma => IsInFollow::Yes,
Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes,
Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
_ => {
if token.can_begin_type() {
IsInFollow::Yes

View File

@ -1,4 +1,4 @@
use rustc_ast::token::{self, Delimiter};
use rustc_ast::token::{self, Delimiter, IdentIsRaw};
use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree};
use rustc_ast::{LitIntType, LitKind};
use rustc_ast_pretty::pprust;
@ -142,7 +142,7 @@ fn parse_ident<'sess>(
if let Some(tt) = iter.next()
&& let TokenTree::Token(token, _) = tt
{
if let Some((elem, false)) = token.ident() {
if let Some((elem, IdentIsRaw::No)) = token.ident() {
return Ok(elem);
}
let token_str = pprust::token_to_string(token);

View File

@ -2,7 +2,7 @@ use crate::errors;
use crate::mbe::macro_parser::count_metavar_decls;
use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree};
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::token::{self, Delimiter, IdentIsRaw, Token};
use rustc_ast::{tokenstream, NodeId};
use rustc_ast_pretty::pprust;
use rustc_feature::Features;
@ -222,7 +222,7 @@ fn parse_tree<'a>(
Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => {
let (ident, is_raw) = token.ident().unwrap();
let span = ident.span.with_lo(span.lo());
if ident.name == kw::Crate && !is_raw {
if ident.name == kw::Crate && matches!(is_raw, IdentIsRaw::No) {
TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
} else {
TokenTree::MetaVar(span, ident)

View File

@ -2,6 +2,7 @@ use crate::tests::{
matches_codepattern, string_to_stream, with_error_checking_parse, with_expected_parse_error,
};
use ast::token::IdentIsRaw;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
@ -74,9 +75,12 @@ fn string_to_tts_macro() {
match tts {
[
TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }, _),
TokenTree::Token(
Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. },
_,
),
TokenTree::Token(Token { kind: token::Not, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }, _),
TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _),
TokenTree::Delimited(.., macro_delim, macro_tts),
] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => {
let tts = &macro_tts.trees().collect::<Vec<_>>();
@ -90,7 +94,10 @@ fn string_to_tts_macro() {
match &tts[..] {
[
TokenTree::Token(Token { kind: token::Dollar, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _),
TokenTree::Token(
Token { kind: token::Ident(name, IdentIsRaw::No), .. },
_,
),
] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {
}
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
@ -99,7 +106,10 @@ fn string_to_tts_macro() {
match &tts[..] {
[
TokenTree::Token(Token { kind: token::Dollar, .. }, _),
TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _),
TokenTree::Token(
Token { kind: token::Ident(name, IdentIsRaw::No), .. },
_,
),
] if second_delim == &Delimiter::Parenthesis
&& name.as_str() == "a" => {}
_ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
@ -119,8 +129,11 @@ fn string_to_tts_1() {
let tts = string_to_stream("fn a(b: i32) { b; }".to_string());
let expected = TokenStream::new(vec![
TokenTree::token_alone(token::Ident(kw::Fn, false), sp(0, 2)),
TokenTree::token_joint_hidden(token::Ident(Symbol::intern("a"), false), sp(3, 4)),
TokenTree::token_alone(token::Ident(kw::Fn, IdentIsRaw::No), sp(0, 2)),
TokenTree::token_joint_hidden(
token::Ident(Symbol::intern("a"), IdentIsRaw::No),
sp(3, 4),
),
TokenTree::Delimited(
DelimSpan::from_pair(sp(4, 5), sp(11, 12)),
// `JointHidden` because the `(` is followed immediately by
@ -128,10 +141,16 @@ fn string_to_tts_1() {
DelimSpacing::new(Spacing::JointHidden, Spacing::Alone),
Delimiter::Parenthesis,
TokenStream::new(vec![
TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(5, 6)),
TokenTree::token_joint(
token::Ident(Symbol::intern("b"), IdentIsRaw::No),
sp(5, 6),
),
TokenTree::token_alone(token::Colon, sp(6, 7)),
// `JointHidden` because the `i32` is immediately followed by the `)`.
TokenTree::token_joint_hidden(token::Ident(sym::i32, false), sp(8, 11)),
TokenTree::token_joint_hidden(
token::Ident(sym::i32, IdentIsRaw::No),
sp(8, 11),
),
])
.into(),
),
@ -143,7 +162,10 @@ fn string_to_tts_1() {
DelimSpacing::new(Spacing::Alone, Spacing::Alone),
Delimiter::Brace,
TokenStream::new(vec![
TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(15, 16)),
TokenTree::token_joint(
token::Ident(Symbol::intern("b"), IdentIsRaw::No),
sp(15, 16),
),
// `Alone` because the `;` is followed by whitespace.
TokenTree::token_alone(token::Semi, sp(16, 17)),
])

View File

@ -1,4 +1,5 @@
use crate::base::ExtCtxt;
use ast::token::IdentIsRaw;
use pm::bridge::{
server, DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree,
};
@ -216,7 +217,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
Question => op("?"),
SingleQuote => op("'"),
Ident(sym, is_raw) => trees.push(TokenTree::Ident(Ident { sym, is_raw, span })),
Ident(sym, is_raw) => {
trees.push(TokenTree::Ident(Ident { sym, is_raw: is_raw.into(), span }))
}
Lifetime(name) => {
let ident = symbol::Ident::new(name, span).without_first_quote();
trees.extend([
@ -238,7 +241,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
escaped.extend(ch.escape_debug());
}
let stream = [
Ident(sym::doc, false),
Ident(sym::doc, IdentIsRaw::No),
Eq,
TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
]
@ -259,7 +262,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
Interpolated(ref nt) if let NtIdent(ident, is_raw) = &nt.0 => {
trees.push(TokenTree::Ident(Ident {
sym: ident.name,
is_raw: *is_raw,
is_raw: matches!(is_raw, IdentIsRaw::Yes),
span: ident.span,
}))
}
@ -352,7 +355,7 @@ impl ToInternal<SmallVec<[tokenstream::TokenTree; 2]>>
}
TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
rustc.sess().symbol_gallery.insert(sym, span);
smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw), span)]
smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw.into()), span)]
}
TokenTree::Literal(self::Literal {
kind: self::LitKind::Integer,
@ -569,7 +572,7 @@ impl server::TokenStream for Rustc<'_, '_> {
match &expr.kind {
ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => {
Ok(tokenstream::TokenStream::token_alone(
token::Ident(token_lit.symbol, false),
token::Ident(token_lit.symbol, IdentIsRaw::No),
expr.span,
))
}

View File

@ -1,6 +1,6 @@
use crate::tests::string_to_stream;
use rustc_ast::token;
use rustc_ast::token::{self, IdentIsRaw};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_span::create_default_session_globals_then;
use rustc_span::{BytePos, Span, Symbol};
@ -86,7 +86,8 @@ fn test_diseq_1() {
fn test_is_empty() {
create_default_session_globals_then(|| {
let test0 = TokenStream::default();
let test1 = TokenStream::token_alone(token::Ident(Symbol::intern("a"), false), sp(0, 1));
let test1 =
TokenStream::token_alone(token::Ident(Symbol::intern("a"), IdentIsRaw::No), sp(0, 1));
let test2 = string_to_ts("foo(bar::baz)");
assert_eq!(test0.is_empty(), true);

View File

@ -1821,7 +1821,7 @@ impl KeywordIdents {
match tt {
// Only report non-raw idents.
TokenTree::Token(token, _) => {
if let Some((ident, false)) = token.ident() {
if let Some((ident, token::IdentIsRaw::No)) = token.ident() {
self.check_ident_token(cx, UnderMacro(true), ident);
}
}

View File

@ -4,7 +4,7 @@ use crate::errors;
use crate::lexer::unicode_chars::UNICODE_ARRAY;
use crate::make_unclosed_delims_error;
use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind};
use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::contains_text_flow_control_chars;
use rustc_errors::{codes::*, Applicability, DiagCtxt, DiagnosticBuilder, StashKey};
@ -181,7 +181,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
}
self.sess.raw_identifier_spans.push(span);
token::Ident(sym, true)
token::Ident(sym, IdentIsRaw::Yes)
}
rustc_lexer::TokenKind::UnknownPrefix => {
self.report_unknown_prefix(start);
@ -201,7 +201,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
let span = self.mk_sp(start, self.pos);
self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default()
.push(span);
token::Ident(sym, false)
token::Ident(sym, IdentIsRaw::No)
}
// split up (raw) c string literals to an ident and a string literal when edition < 2021.
rustc_lexer::TokenKind::Literal {
@ -339,7 +339,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> {
let sym = nfc_normalize(self.str_from(start));
let span = self.mk_sp(start, self.pos);
self.sess.symbol_gallery.insert(sym, span);
token::Ident(sym, false)
token::Ident(sym, IdentIsRaw::No)
}
/// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly

View File

@ -307,7 +307,7 @@ pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[
// fancier error recovery to it, as there will be less overall work to do this way.
const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
(" ", "Space", None),
("_", "Underscore", Some(token::Ident(kw::Underscore, false))),
("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))),
("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))),
(",", "Comma", Some(token::Comma)),
(";", "Semicolon", Some(token::Semi)),

View File

@ -21,6 +21,7 @@ use crate::errors::{
use crate::fluent_generated as fluent;
use crate::parser;
use crate::parser::attr::InnerAttrPolicy;
use ast::token::IdentIsRaw;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind};
@ -264,7 +265,7 @@ impl<'a> Parser<'a> {
pub(super) fn expected_ident_found(
&mut self,
recover: bool,
) -> PResult<'a, (Ident, /* is_raw */ bool)> {
) -> PResult<'a, (Ident, IdentIsRaw)> {
if let TokenKind::DocComment(..) = self.prev_token.kind {
return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything {
span: self.prev_token.span,
@ -290,11 +291,11 @@ impl<'a> Parser<'a> {
let bad_token = self.token.clone();
// suggest prepending a keyword in identifier position with `r#`
let suggest_raw = if let Some((ident, false)) = self.token.ident()
let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.is_raw_guess()
&& self.look_ahead(1, |t| valid_follow.contains(&t.kind))
{
recovered_ident = Some((ident, true));
recovered_ident = Some((ident, IdentIsRaw::Yes));
// `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`,
// which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#`
@ -320,7 +321,7 @@ impl<'a> Parser<'a> {
let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| {
let (invalid, valid) = self.token.span.split_at(len as u32);
recovered_ident = Some((Ident::new(valid_portion, valid), false));
recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No));
HelpIdentifierStartsWithNumber { num_span: invalid }
});
@ -653,9 +654,9 @@ impl<'a> Parser<'a> {
// positive for a `cr#` that wasn't intended to start a c-string literal, but identifying
// that in the parser requires unbounded lookahead, so we only add a hint to the existing
// error rather than replacing it entirely.
if ((self.prev_token.kind == TokenKind::Ident(sym::c, false)
if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No)
&& matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. })))
|| (self.prev_token.kind == TokenKind::Ident(sym::cr, false)
|| (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No)
&& matches!(
&self.token.kind,
TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound

View File

@ -10,6 +10,7 @@ use super::{
use crate::errors;
use crate::maybe_recover_from_interpolated_ty_qpath;
use ast::mut_visit::{noop_visit_expr, MutVisitor};
use ast::token::IdentIsRaw;
use ast::{CoroutineKind, ForLoopKind, GenBlockKind, Pat, Path, PathSegment};
use core::mem;
use rustc_ast::ptr::P;
@ -128,7 +129,7 @@ impl<'a> Parser<'a> {
match self.parse_expr_res(restrictions, None) {
Ok(expr) => Ok(expr),
Err(err) => match self.token.ident() {
Some((Ident { name: kw::Underscore, .. }, false))
Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No))
if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) =>
{
// Special-case handling of `foo(_, _, _)`
@ -459,7 +460,9 @@ impl<'a> Parser<'a> {
return None;
}
(Some(op), _) => (op, self.token.span),
(None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => {
(None, Some((Ident { name: sym::and, span }, IdentIsRaw::No)))
if self.may_recover() =>
{
self.dcx().emit_err(errors::InvalidLogicalOperator {
span: self.token.span,
incorrect: "and".into(),
@ -467,7 +470,7 @@ impl<'a> Parser<'a> {
});
(AssocOp::LAnd, span)
}
(None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => {
(None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => {
self.dcx().emit_err(errors::InvalidLogicalOperator {
span: self.token.span,
incorrect: "or".into(),
@ -744,7 +747,7 @@ impl<'a> Parser<'a> {
(
// `foo: `
ExprKind::Path(None, ast::Path { segments, .. }),
token::Ident(kw::For | kw::Loop | kw::While, false),
token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No),
) if segments.len() == 1 => {
let snapshot = self.create_snapshot_for_diagnostic();
let label = Label {
@ -957,19 +960,20 @@ impl<'a> Parser<'a> {
fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
loop {
let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
// we are using noexpect here because we don't expect a `?` directly after a `return`
// which could be suggested otherwise
self.eat_noexpect(&token::Question)
} else {
self.eat(&token::Question)
};
let has_question =
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// we are using noexpect here because we don't expect a `?` directly after a `return`
// which could be suggested otherwise
self.eat_noexpect(&token::Question)
} else {
self.eat(&token::Question)
};
if has_question {
// `expr?`
e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
continue;
}
let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) {
let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
// we are using noexpect here because we don't expect a `.` directly after a `return`
// which could be suggested otherwise
self.eat_noexpect(&token::Dot)
@ -1128,19 +1132,19 @@ impl<'a> Parser<'a> {
// 1.
DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
assert!(suffix.is_none());
self.token = Token::new(token::Ident(sym, false), ident_span);
self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token))
}
// 1.2 | 1.2e3
DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
self.token = Token::new(token::Ident(symbol1, false), ident1_span);
self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span);
// This needs to be `Spacing::Alone` to prevent regressions.
// See issue #76399 and PR #76285 for more details
let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
let base1 =
self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span);
let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span);
self.bump_with((next_token2, self.token_spacing)); // `.`
self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
}
@ -1948,7 +1952,7 @@ impl<'a> Parser<'a> {
self.bump(); // `builtin`
self.bump(); // `#`
let Some((ident, false)) = self.token.ident() else {
let Some((ident, IdentIsRaw::No)) = self.token.ident() else {
let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span });
return Err(err);
};
@ -3576,7 +3580,7 @@ impl<'a> Parser<'a> {
fn find_struct_error_after_field_looking_code(&self) -> Option<ExprField> {
match self.token.ident() {
Some((ident, is_raw))
if (is_raw || !ident.is_reserved())
if (matches!(is_raw, IdentIsRaw::Yes) || !ident.is_reserved())
&& self.look_ahead(1, |t| *t == token::Colon) =>
{
Some(ast::ExprField {

View File

@ -3,6 +3,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::fluent_generated as fluent;
use ast::token::IdentIsRaw;
use rustc_ast::ast::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind};
@ -1079,7 +1080,7 @@ impl<'a> Parser<'a> {
fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> {
match self.token.ident() {
Some((ident @ Ident { name: kw::Underscore, .. }, false)) => {
Some((ident @ Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) => {
self.bump();
Ok(ident)
}
@ -1965,7 +1966,7 @@ impl<'a> Parser<'a> {
let (ident, is_raw) = self.ident_or_err(true)?;
if ident.name == kw::Underscore {
self.sess.gated_spans.gate(sym::unnamed_fields, lo);
} else if !is_raw && ident.is_reserved() {
} else if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
let snapshot = self.create_snapshot_for_diagnostic();
let err = if self.check_fn_front_matter(false, Case::Sensitive) {
let inherited_vis = Visibility {
@ -2743,7 +2744,7 @@ impl<'a> Parser<'a> {
fn parse_self_param(&mut self) -> PResult<'a, Option<Param>> {
// Extract an identifier *after* having confirmed that the token is one.
let expect_self_ident = |this: &mut Self| match this.token.ident() {
Some((ident, false)) => {
Some((ident, IdentIsRaw::No)) => {
this.bump();
ident
}

View File

@ -11,6 +11,7 @@ mod stmt;
mod ty;
use crate::lexer::UnmatchedDelim;
use ast::token::IdentIsRaw;
pub use attr_wrapper::AttrWrapper;
pub use diagnostics::AttemptLocalParseRecovery;
pub(crate) use expr::ForbiddenLetReason;
@ -499,7 +500,7 @@ impl<'a> Parser<'a> {
fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
let (ident, is_raw) = self.ident_or_err(recover)?;
if !is_raw && ident.is_reserved() {
if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() {
let err = self.expected_ident_found_err();
if recover {
err.emit();
@ -511,7 +512,7 @@ impl<'a> Parser<'a> {
Ok(ident)
}
fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> {
fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> {
match self.token.ident() {
Some(ident) => Ok(ident),
None => self.expected_ident_found(recover),
@ -568,7 +569,7 @@ impl<'a> Parser<'a> {
}
if case == Case::Insensitive
&& let Some((ident, /* is_raw */ false)) = self.token.ident()
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
{
true
@ -598,7 +599,7 @@ impl<'a> Parser<'a> {
}
if case == Case::Insensitive
&& let Some((ident, /* is_raw */ false)) = self.token.ident()
&& let Some((ident, IdentIsRaw::No)) = self.token.ident()
&& ident.as_str().to_lowercase() == kw.as_str().to_lowercase()
{
self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() });

View File

@ -201,6 +201,6 @@ impl<'a> Parser<'a> {
/// The token is an identifier, but not `_`.
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
fn get_macro_ident(token: &Token) -> Option<(Ident, token::IdentIsRaw)> {
token.ident().filter(|(ident, _)| ident.name != kw::Underscore)
}

View File

@ -311,7 +311,7 @@ impl<'a> Parser<'a> {
matches!(
&token.uninterpolate().kind,
token::FatArrow // e.g. `a | => 0,`.
| token::Ident(kw::If, false) // e.g. `a | if expr`.
| token::Ident(kw::If, token::IdentIsRaw::No) // e.g. `a | if expr`.
| token::Eq // e.g. `let a | = 0`.
| token::Semi // e.g. `let a |;`.
| token::Colon // e.g. `let a | :`.

View File

@ -2,6 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{Parser, Restrictions, TokenType};
use crate::errors::PathSingleColon;
use crate::{errors, maybe_whole};
use ast::token::IdentIsRaw;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::{
@ -390,7 +391,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> {
match self.token.ident() {
Some((ident, false)) if ident.is_path_segment_keyword() => {
Some((ident, IdentIsRaw::No)) if ident.is_path_segment_keyword() => {
self.bump();
Ok(ident)
}

View File

@ -691,7 +691,7 @@ impl<'a> Parser<'a> {
token.kind,
token::Ident(
kw::For | kw::Loop | kw::While,
false
token::IdentIsRaw::No
) | token::OpenDelim(Delimiter::Brace)
)
})

View File

@ -1,4 +1,4 @@
use rustc_ast::token::{self, BinOpToken, Delimiter};
use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast_pretty::pprust::state::State as Printer;
use rustc_ast_pretty::pprust::PrintState;
@ -148,7 +148,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
(false, Other)
}
(Pound, token::Not) => (false, PoundBang),
(_, token::Ident(symbol, /* is_raw */ false))
(_, token::Ident(symbol, IdentIsRaw::No))
if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) =>
{
(true, Ident)