use diagnostics::make_errors_for_mismatched_closing_delims;
use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::{TEXT_FLOW_CONTROL_CHARS, contains_text_flow_control_chars};
use rustc_errors::codes::*;
use rustc_errors::{Applicability, Diag, DiagCtxtHandle, StashKey};
use rustc_lexer::{
    Base, Cursor, DocStyle, FrontmatterAllowed, LiteralKind, RawStrError, is_horizontal_whitespace,
};
use rustc_literal_escaper::{EscapeError, Mode, check_for_errors};
use rustc_session::lint::BuiltinLintDiag;
use rustc_session::lint::builtin::{
    RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
    TEXT_DIRECTION_CODEPOINT_IN_COMMENT, TEXT_DIRECTION_CODEPOINT_IN_LITERAL,
};
use rustc_session::parse::ParseSess;
use rustc_span::{BytePos, Pos, Span, Symbol, sym};
use tracing::debug;

use crate::errors;
use crate::lexer::diagnostics::TokenTreeDiagInfo;
use crate::lexer::unicode_chars::UNICODE_ARRAY;

mod diagnostics;
mod tokentrees;
mod unescape_error_reporting;
mod unicode_chars;

use unescape_error_reporting::{emit_unescape_error, escaped_char};

#[cfg(target_pointer_width = "64")]
rustc_data_structures::static_assert_size!(rustc_lexer::Token, 12);

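/// Invisible (zero-width or formatting) characters that get an extra "invisible character" note
/// when they appear at the start of an otherwise unknown token.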
const INVISIBLE_CHARACTERS: [char; 8] = [
    '\u{200b}', '\u{200c}', '\u{2060}', '\u{2061}', '\u{2062}', '\u{00ad}', '\u{034f}', '\u{061c}',
];

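/// An unmatched closing delimiter, together with the spans used to diagnose it: where it was
/// found, the still-unclosed opening delimiter (if any), and a candidate it may have been
/// intended to close.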
#[derive(Clone, Debug)]
pub(crate) struct UnmatchedDelim {
    pub found_delim: Option<Delimiter>,
    pub found_span: Span,
    pub unclosed_span: Option<Span>,
    pub candidate_span: Option<Span>,
}

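/// Which leading constructs (shebang line, frontmatter block) to strip from the source before
/// lexing.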
pub enum StripTokens {
    /// Strip both the shebang line and any frontmatter block.
    ShebangAndFrontmatter,
    /// Strip only the shebang line; frontmatter is left in place.
    Shebang,
    /// Strip nothing.
    Nothing,
}

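/// Lexes `src` into a `TokenStream`, first stripping a shebang and/or frontmatter as requested
/// by `strip_tokens`. On failure, returns the diagnostics for mismatched closing delimiters
/// together with any other lexing errors.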
pub(crate) fn lex_token_trees<'psess, 'src>(
    psess: &'psess ParseSess,
    mut src: &'src str,
    mut start_pos: BytePos,
    override_span: Option<Span>,
    strip_tokens: StripTokens,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
    match strip_tokens {
        StripTokens::Shebang | StripTokens::ShebangAndFrontmatter => {
            if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
                src = &src[shebang_len..];
                start_pos = start_pos + BytePos::from_usize(shebang_len);
            }
        }
        StripTokens::Nothing => {}
    }

    let frontmatter_allowed = match strip_tokens {
        StripTokens::ShebangAndFrontmatter => FrontmatterAllowed::Yes,
        StripTokens::Shebang | StripTokens::Nothing => FrontmatterAllowed::No,
    };

    let cursor = Cursor::new(src, frontmatter_allowed);
    let mut lexer = Lexer {
        psess,
        start_pos,
        pos: start_pos,
        src,
        cursor,
        override_span,
        nbsp_is_whitespace: false,
        last_lifetime: None,
        token: Token::dummy(),
        diag_info: TokenTreeDiagInfo::default(),
    };
    let res = lexer.lex_token_trees(false);

    let mut unmatched_closing_delims: Vec<_> =
        make_errors_for_mismatched_closing_delims(&lexer.diag_info.unmatched_delims, psess);

    match res {
        Ok((_open_spacing, stream)) => {
            if unmatched_closing_delims.is_empty() {
                Ok(stream)
            } else {
                Err(unmatched_closing_delims)
            }
        }
        Err(errs) => {
            unmatched_closing_delims.extend(errs);
            Err(unmatched_closing_delims)
        }
    }
}

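/// The low-level lexer: wraps a `rustc_lexer::Cursor` and cooks its raw tokens into `rustc_ast`
/// tokens, emitting diagnostics along the way.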
struct Lexer<'psess, 'src> {
    psess: &'psess ParseSess,
    /// Initial position, read-only.
    start_pos: BytePos,
    /// The absolute offset of the current character within the source map.
    pos: BytePos,
    /// Source text to tokenize.
    src: &'src str,
    /// Cursor for getting lexer tokens.
    cursor: Cursor<'src>,
    override_span: Option<Span>,
    /// Once an "unknown start of token" error has been emitted for a non-breaking space,
    /// further occurrences of that character are treated as plain whitespace.
    nbsp_is_whitespace: bool,

    /// Span of the leading `'` of the last lifetime seen, used to improve diagnostics for
    /// char and string literals mistyped as lifetimes.
    last_lifetime: Option<Span>,

    /// The current token.
    token: Token,

    /// Diagnostic information gathered while lexing token trees (e.g. unmatched delimiters).
    diag_info: TokenTreeDiagInfo,
}

impl<'psess, 'src> Lexer<'psess, 'src> {
    fn dcx(&self) -> DiagCtxtHandle<'psess> {
        self.psess.dcx()
    }

    fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
        self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
    }

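    /// Returns the next token from the underlying cursor, cooked into a `rustc_ast` token,
    /// paired with a bool indicating whether the token was preceded by whitespace.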
    fn next_token_from_cursor(&mut self) -> (Token, bool) {
        let mut preceded_by_whitespace = false;
        let mut swallow_next_invalid = 0;
        // Skip trivial (whitespace and comment) tokens.
        loop {
            let str_before = self.cursor.as_str();
            let token = self.cursor.advance_token();
            let start = self.pos;
            self.pos = self.pos + BytePos(token.len);

            debug!("next_token: {:?}({:?})", token.kind, self.str_from(start));

            if let rustc_lexer::TokenKind::Semi
            | rustc_lexer::TokenKind::LineComment { .. }
            | rustc_lexer::TokenKind::BlockComment { .. }
            | rustc_lexer::TokenKind::CloseParen
            | rustc_lexer::TokenKind::CloseBrace
            | rustc_lexer::TokenKind::CloseBracket = token.kind
            {
                // A lifetime followed by one of these tokens is clearly a real lifetime, so
                // stop tracking it for the "mistyped char/string literal" diagnostics.
                self.last_lifetime = None;
            }

            let kind = match token.kind {
                rustc_lexer::TokenKind::LineComment { doc_style } => {
                    // Skip non-doc comments.
                    let Some(doc_style) = doc_style else {
                        self.lint_unicode_text_flow(start);
                        preceded_by_whitespace = true;
                        continue;
                    };

                    // The three-byte opening delimiter (`///` or `//!`) is not included in the
                    // doc comment's content.
                    let content_start = start + BytePos(3);
                    let content = self.str_from(content_start);
                    self.lint_doc_comment_unicode_text_flow(start, content);
                    self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style)
                }
                rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => {
                    if !terminated {
                        self.report_unterminated_block_comment(start, doc_style);
                    }

                    // Skip non-doc comments.
                    let Some(doc_style) = doc_style else {
                        self.lint_unicode_text_flow(start);
                        preceded_by_whitespace = true;
                        continue;
                    };

                    // The three-byte opening delimiter and (when present) the two-byte closing
                    // delimiter are not included in the doc comment's content.
                    let content_start = start + BytePos(3);
                    let content_end = self.pos - BytePos(if terminated { 2 } else { 0 });
                    let content = self.str_from_to(content_start, content_end);
                    self.lint_doc_comment_unicode_text_flow(start, content);
                    self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style)
                }
                rustc_lexer::TokenKind::Frontmatter {
                    has_invalid_preceding_whitespace,
                    invalid_infostring,
                } => {
                    self.validate_frontmatter(
                        start,
                        has_invalid_preceding_whitespace,
                        invalid_infostring,
                    );
                    preceded_by_whitespace = true;
                    continue;
                }
                rustc_lexer::TokenKind::Whitespace => {
                    preceded_by_whitespace = true;
                    continue;
                }
                rustc_lexer::TokenKind::Ident => self.ident(start),
                rustc_lexer::TokenKind::RawIdent => {
                    let sym = nfc_normalize(self.str_from(start + BytePos(2)));
                    let span = self.mk_sp(start, self.pos);
                    self.psess.symbol_gallery.insert(sym, span);
                    if !sym.can_be_raw() {
                        self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
                    }
                    self.psess.raw_identifier_spans.push(span);
                    token::Ident(sym, IdentIsRaw::Yes)
                }
                rustc_lexer::TokenKind::UnknownPrefix => {
                    self.report_unknown_prefix(start);
                    self.ident(start)
                }
                rustc_lexer::TokenKind::UnknownPrefixLifetime => {
                    self.report_unknown_prefix(start);
                    let lifetime_name = self.str_from(start);
                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));
                    let ident = Symbol::intern(lifetime_name);
                    token::Lifetime(ident, IdentIsRaw::No)
                }
                rustc_lexer::TokenKind::InvalidIdent
                    // Do not recover an identifier when the codepoint is a confusable with a
                    // known ASCII substitution; that case is handled further down.
                    if !UNICODE_ARRAY.iter().any(|&(c, _, _)| {
                        let sym = self.str_from(start);
                        sym.chars().count() == 1 && c == sym.chars().next().unwrap()
                    }) =>
                {
                    let sym = nfc_normalize(self.str_from(start));
                    let span = self.mk_sp(start, self.pos);
                    self.psess
                        .bad_unicode_identifiers
                        .borrow_mut()
                        .entry(sym)
                        .or_default()
                        .push(span);
                    token::Ident(sym, IdentIsRaw::No)
                }
                // Before edition 2021, split a (raw) C string literal into an identifier for
                // the `c`/`cr` prefix and a separate string literal.
                rustc_lexer::TokenKind::Literal {
                    kind: kind @ (LiteralKind::CStr { .. } | LiteralKind::RawCStr { .. }),
                    suffix_start: _,
                } if !self.mk_sp(start, self.pos).edition().at_least_rust_2021() => {
                    let prefix_len = match kind {
                        LiteralKind::CStr { .. } => 1,
                        LiteralKind::RawCStr { .. } => 2,
                        _ => unreachable!(),
                    };

                    // Reset the lexer state so that only the prefix ("c" or "cr") was consumed.
                    let lit_start = start + BytePos(prefix_len);
                    self.pos = lit_start;
                    self.cursor =
                        Cursor::new(&str_before[prefix_len as usize..], FrontmatterAllowed::No);
                    self.report_unknown_prefix(start);
                    let prefix_span = self.mk_sp(start, lit_start);
                    return (Token::new(self.ident(start), prefix_span), preceded_by_whitespace);
                }
                rustc_lexer::TokenKind::GuardedStrPrefix => {
                    self.maybe_report_guarded_str(start, str_before)
                }
                rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
                    let suffix_start = start + BytePos(suffix_start);
                    let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind);
                    let suffix = if suffix_start < self.pos {
                        let string = self.str_from(suffix_start);
                        if string == "_" {
                            self.dcx().emit_err(errors::UnderscoreLiteralSuffix {
                                span: self.mk_sp(suffix_start, self.pos),
                            });
                            None
                        } else {
                            Some(Symbol::intern(string))
                        }
                    } else {
                        None
                    };
                    self.lint_literal_unicode_text_flow(
                        symbol,
                        kind,
                        self.mk_sp(start, self.pos),
                        "literal",
                    );
                    token::Literal(token::Lit { kind, symbol, suffix })
                }
                rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
                    let lifetime_name = nfc_normalize(self.str_from(start));
                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));
                    if starts_with_number {
                        let span = self.mk_sp(start, self.pos);
                        self.dcx()
                            .struct_err("lifetimes cannot start with a number")
                            .with_span(span)
                            .stash(span, StashKey::LifetimeIsChar);
                    }
                    token::Lifetime(lifetime_name, IdentIsRaw::No)
                }
                rustc_lexer::TokenKind::RawLifetime => {
                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));

                    let ident_start = start + BytePos(3);
                    let prefix_span = self.mk_sp(start, ident_start);

                    if prefix_span.at_least_rust_2021() {
                        // If the raw lifetime is immediately followed by `'`, the user most
                        // likely meant a char literal, so report it as a char literal
                        // containing more than one character.
                        if self.cursor.as_str().starts_with('\'') {
                            let lit_span = self.mk_sp(start, self.pos + BytePos(1));
                            let contents = self.str_from_to(start + BytePos(1), self.pos);
                            emit_unescape_error(
                                self.dcx(),
                                contents,
                                lit_span,
                                lit_span,
                                Mode::Char,
                                0..contents.len(),
                                EscapeError::MoreThanOneChar,
                            )
                            .expect("expected error");
                        }

                        let span = self.mk_sp(start, self.pos);

                        let lifetime_name_without_tick =
                            Symbol::intern(&self.str_from(ident_start));
                        if !lifetime_name_without_tick.can_be_raw() {
                            self.dcx().emit_err(errors::CannotBeRawLifetime {
                                span,
                                ident: lifetime_name_without_tick,
                            });
                        }

                        // Put the `'` back onto the lifetime name.
                        let mut lifetime_name =
                            String::with_capacity(lifetime_name_without_tick.as_str().len() + 1);
                        lifetime_name.push('\'');
                        lifetime_name += lifetime_name_without_tick.as_str();
                        let sym = nfc_normalize(&lifetime_name);

                        self.psess.raw_identifier_spans.push(span);

                        token::Lifetime(sym, IdentIsRaw::Yes)
                    } else {
                        // Before edition 2021, `'r#...` is not a raw lifetime; buffer the
                        // compatibility lint and re-lex it as `'r` followed by `#...`.
                        self.psess.buffer_lint(
                            RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                            prefix_span,
                            ast::CRATE_NODE_ID,
                            BuiltinLintDiag::RawPrefix(prefix_span),
                        );

                        // Reset the lexer state so that only `'r` was consumed.
                        let lt_start = start + BytePos(2);
                        self.pos = lt_start;
                        self.cursor = Cursor::new(&str_before[2..], FrontmatterAllowed::No);

                        let lifetime_name = nfc_normalize(self.str_from(start));
                        token::Lifetime(lifetime_name, IdentIsRaw::No)
                    }
                }
                rustc_lexer::TokenKind::Semi => token::Semi,
                rustc_lexer::TokenKind::Comma => token::Comma,
                rustc_lexer::TokenKind::Dot => token::Dot,
                rustc_lexer::TokenKind::OpenParen => token::OpenParen,
                rustc_lexer::TokenKind::CloseParen => token::CloseParen,
                rustc_lexer::TokenKind::OpenBrace => token::OpenBrace,
                rustc_lexer::TokenKind::CloseBrace => token::CloseBrace,
                rustc_lexer::TokenKind::OpenBracket => token::OpenBracket,
                rustc_lexer::TokenKind::CloseBracket => token::CloseBracket,
                rustc_lexer::TokenKind::At => token::At,
                rustc_lexer::TokenKind::Pound => token::Pound,
                rustc_lexer::TokenKind::Tilde => token::Tilde,
                rustc_lexer::TokenKind::Question => token::Question,
                rustc_lexer::TokenKind::Colon => token::Colon,
                rustc_lexer::TokenKind::Dollar => token::Dollar,
                rustc_lexer::TokenKind::Eq => token::Eq,
                rustc_lexer::TokenKind::Bang => token::Bang,
                rustc_lexer::TokenKind::Lt => token::Lt,
                rustc_lexer::TokenKind::Gt => token::Gt,
                rustc_lexer::TokenKind::Minus => token::Minus,
                rustc_lexer::TokenKind::And => token::And,
                rustc_lexer::TokenKind::Or => token::Or,
                rustc_lexer::TokenKind::Plus => token::Plus,
                rustc_lexer::TokenKind::Star => token::Star,
                rustc_lexer::TokenKind::Slash => token::Slash,
                rustc_lexer::TokenKind::Caret => token::Caret,
                rustc_lexer::TokenKind::Percent => token::Percent,

                rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
                    // Don't emit diagnostics for a run of identical invalid characters; the
                    // first error already reports how often the character repeats.
                    if swallow_next_invalid > 0 {
                        swallow_next_invalid -= 1;
                        continue;
                    }
                    let mut it = self.str_from_to_end(start).chars();
                    let c = it.next().unwrap();
                    if c == '\u{00a0}' {
                        // If an error was already emitted for a non-breaking space earlier in
                        // this file, treat further occurrences as plain whitespace.
                        if self.nbsp_is_whitespace {
                            preceded_by_whitespace = true;
                            continue;
                        }
                        self.nbsp_is_whitespace = true;
                    }
                    let repeats = it.take_while(|c1| *c1 == c).count();
                    // If the character is a confusable for an ASCII token, suggest (and
                    // possibly substitute) that token.
                    let (token, sugg) =
                        unicode_chars::check_for_substitution(self, start, c, repeats + 1);
                    self.dcx().emit_err(errors::UnknownTokenStart {
                        span: self.mk_sp(start, self.pos + Pos::from_usize(repeats * c.len_utf8())),
                        escaped: escaped_char(c),
                        sugg,
                        null: if c == '\x00' { Some(errors::UnknownTokenNull) } else { None },
                        invisible: if INVISIBLE_CHARACTERS.contains(&c) {
                            Some(errors::InvisibleCharacter)
                        } else {
                            None
                        },
                        repeat: if repeats > 0 {
                            swallow_next_invalid = repeats;
                            Some(errors::UnknownTokenRepeat { repeats })
                        } else {
                            None
                        },
                    });

                    if let Some(token) = token {
                        token
                    } else {
                        preceded_by_whitespace = true;
                        continue;
                    }
                }
                rustc_lexer::TokenKind::Eof => token::Eof,
            };
            let span = self.mk_sp(start, self.pos);
            return (Token::new(kind, span), preceded_by_whitespace);
        }
    }

    fn ident(&self, start: BytePos) -> TokenKind {
        let sym = nfc_normalize(self.str_from(start));
        let span = self.mk_sp(start, self.pos);
        self.psess.symbol_gallery.insert(sym, span);
        token::Ident(sym, IdentIsRaw::No)
    }

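    /// Detects characters that change the direction of text rendering inside a (non-doc)
    /// comment and buffers the corresponding lint for them.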
    fn lint_unicode_text_flow(&self, start: BytePos) {
        // The two-byte opening delimiter (`//` or `/*`) is not part of the checked content.
        let content_start = start + BytePos(2);
        let content = self.str_from(content_start);
        if contains_text_flow_control_chars(content) {
            let span = self.mk_sp(start, self.pos);
            self.psess.buffer_lint(
                TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
                span,
                ast::CRATE_NODE_ID,
                BuiltinLintDiag::UnicodeTextFlow(span, content.to_string()),
            );
        }
    }

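    /// Buffers the text-direction-codepoint lint when a doc comment contains bidirectional
    /// control characters.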
    fn lint_doc_comment_unicode_text_flow(&mut self, start: BytePos, content: &str) {
        if contains_text_flow_control_chars(content) {
            self.report_text_direction_codepoint(
                content,
                self.mk_sp(start, self.pos),
                0,
                false,
                "doc comment",
            );
        }
    }

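    /// Buffers the text-direction-codepoint lint when a literal contains bidirectional control
    /// characters, pointing at the offending characters inside the literal where possible.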
    fn lint_literal_unicode_text_flow(
        &mut self,
        text: Symbol,
        lit_kind: token::LitKind,
        span: Span,
        label: &'static str,
    ) {
        if !contains_text_flow_control_chars(text.as_str()) {
            return;
        }
        let (padding, point_at_inner_spans) = match lit_kind {
            // Account for the `"` or `'`.
            token::LitKind::Str | token::LitKind::Char => (1, true),
            // Account for the `c"`.
            token::LitKind::CStr => (2, true),
            // Account for the `r###"`.
            token::LitKind::StrRaw(n) => (n as u32 + 2, true),
            // Account for the `cr###"`.
            token::LitKind::CStrRaw(n) => (n as u32 + 3, true),
            // Don't lint literals that already failed to parse.
            token::LitKind::Err(_) => return,
            // Be conservative for the remaining kinds.
            _ => (0, false),
        };
        self.report_text_direction_codepoint(
            text.as_str(),
            span,
            padding,
            point_at_inner_spans,
            label,
        );
    }

    fn report_text_direction_codepoint(
        &self,
        text: &str,
        span: Span,
        padding: u32,
        point_at_inner_spans: bool,
        label: &str,
    ) {
        // Obtain the `Span`s for each of the forbidden chars.
        let spans: Vec<_> = text
            .char_indices()
            .filter_map(|(i, c)| {
                TEXT_FLOW_CONTROL_CHARS.contains(&c).then(|| {
                    let lo = span.lo() + BytePos(i as u32 + padding);
                    (c, span.with_lo(lo).with_hi(lo + BytePos(c.len_utf8() as u32)))
                })
            })
            .collect();

        let label = label.to_string();
        let count = spans.len();
        let labels = point_at_inner_spans
            .then_some(errors::HiddenUnicodeCodepointsDiagLabels { spans: spans.clone() });
        let sub = if point_at_inner_spans && !spans.is_empty() {
            errors::HiddenUnicodeCodepointsDiagSub::Escape { spans }
        } else {
            errors::HiddenUnicodeCodepointsDiagSub::NoEscape { spans }
        };

        self.psess.buffer_lint(
            TEXT_DIRECTION_CODEPOINT_IN_LITERAL,
            span,
            ast::CRATE_NODE_ID,
            errors::HiddenUnicodeCodepointsDiag { label, count, span_label: span, labels, sub },
        );
    }

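    /// Validates a frontmatter block that the low-level lexer already consumed: checks the
    /// opening and closing fences, the infostring, and the surrounding whitespace, emitting
    /// errors for anything malformed, and gates the block behind the `frontmatter` feature.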
    fn validate_frontmatter(
        &self,
        start: BytePos,
        has_invalid_preceding_whitespace: bool,
        invalid_infostring: bool,
    ) {
        let s = self.str_from(start);
        let real_start = s.find("---").unwrap();
        let frontmatter_opening_pos = BytePos(real_start as u32) + start;
        let s_new = &s[real_start..];
        let within = s_new.trim_start_matches('-');
        let len_opening = s_new.len() - within.len();

        let frontmatter_opening_end_pos = frontmatter_opening_pos + BytePos(len_opening as u32);
        if has_invalid_preceding_whitespace {
            let line_start =
                BytePos(s[..real_start].rfind("\n").map_or(0, |i| i as u32 + 1)) + start;
            let span = self.mk_sp(line_start, frontmatter_opening_end_pos);
            let label_span = self.mk_sp(line_start, frontmatter_opening_pos);
            self.dcx().emit_err(errors::FrontmatterInvalidOpeningPrecedingWhitespace {
                span,
                note_span: label_span,
            });
        }

        if invalid_infostring {
            let line_end = s[real_start..].find('\n').unwrap_or(s[real_start..].len());
            let span = self.mk_sp(
                frontmatter_opening_end_pos,
                frontmatter_opening_pos + BytePos(line_end as u32),
            );
            self.dcx().emit_err(errors::FrontmatterInvalidInfostring { span });
        }

        let last_line_start = within.rfind('\n').map_or(0, |i| i + 1);
        let last_line = &within[last_line_start..];
        let last_line_trimmed = last_line.trim_start_matches(is_horizontal_whitespace);
        let last_line_start_pos = frontmatter_opening_end_pos + BytePos(last_line_start as u32);

        let frontmatter_span = self.mk_sp(frontmatter_opening_pos, self.pos);
        self.psess.gated_spans.gate(sym::frontmatter, frontmatter_span);

        if !last_line_trimmed.starts_with("---") {
            let label_span = self.mk_sp(frontmatter_opening_pos, frontmatter_opening_end_pos);
            self.dcx().emit_err(errors::FrontmatterUnclosed {
                span: frontmatter_span,
                note_span: label_span,
            });
            return;
        }

        if last_line_trimmed.len() != last_line.len() {
            let line_end = last_line_start_pos + BytePos(last_line.len() as u32);
            let span = self.mk_sp(last_line_start_pos, line_end);
            let whitespace_end =
                last_line_start_pos + BytePos((last_line.len() - last_line_trimmed.len()) as u32);
            let label_span = self.mk_sp(last_line_start_pos, whitespace_end);
            self.dcx().emit_err(errors::FrontmatterInvalidClosingPrecedingWhitespace {
                span,
                note_span: label_span,
            });
        }

        let rest = last_line_trimmed.trim_start_matches('-');
        let len_close = last_line_trimmed.len() - rest.len();
        if len_close != len_opening {
            let span = self.mk_sp(frontmatter_opening_pos, self.pos);
            let opening = self.mk_sp(frontmatter_opening_pos, frontmatter_opening_end_pos);
            let last_line_close_pos = last_line_start_pos + BytePos(len_close as u32);
            let close = self.mk_sp(last_line_start_pos, last_line_close_pos);
            self.dcx().emit_err(errors::FrontmatterLengthMismatch {
                span,
                opening,
                close,
                len_opening,
                len_close,
            });
        }

        // The fence length must fit in a `u8`.
        if u8::try_from(len_opening).is_err() {
            self.dcx().emit_err(errors::FrontmatterTooManyDashes { len_opening });
        }

        if !rest.trim_matches(is_horizontal_whitespace).is_empty() {
            let span = self.mk_sp(last_line_start_pos, self.pos);
            self.dcx().emit_err(errors::FrontmatterExtraCharactersAfterClose { span });
        }
    }

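    /// Turns a doc comment's content into a `DocComment` token, rejecting bare carriage returns
    /// inside the comment.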
    fn cook_doc_comment(
        &self,
        content_start: BytePos,
        content: &str,
        comment_kind: CommentKind,
        doc_style: DocStyle,
    ) -> TokenKind {
        if content.contains('\r') {
            for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') {
                let span = self.mk_sp(
                    content_start + BytePos(idx as u32),
                    content_start + BytePos(idx as u32 + 1),
                );
                let block = matches!(comment_kind, CommentKind::Block);
                self.dcx().emit_err(errors::CrDocComment { span, block });
            }
        }

        let attr_style = match doc_style {
            DocStyle::Outer => AttrStyle::Outer,
            DocStyle::Inner => AttrStyle::Inner,
        };

        token::DocComment(comment_kind, attr_style, Symbol::intern(content))
    }

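    /// Converts a `rustc_lexer` literal into a `rustc_ast` literal kind and symbol, reporting
    /// unterminated literals, invalid digits, empty exponents, and similar errors along the way.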
    fn cook_lexer_literal(
        &self,
        start: BytePos,
        end: BytePos,
        kind: rustc_lexer::LiteralKind,
    ) -> (token::LitKind, Symbol) {
        match kind {
            rustc_lexer::LiteralKind::Char { terminated } => {
                if !terminated {
                    let mut err = self
                        .dcx()
                        .struct_span_fatal(self.mk_sp(start, end), "unterminated character literal")
                        .with_code(E0762);
                    if let Some(lt_sp) = self.last_lifetime {
                        err.multipart_suggestion(
                            "if you meant to write a string literal, use double quotes",
                            vec![
                                (lt_sp, "\"".to_string()),
                                (self.mk_sp(start, start + BytePos(1)), "\"".to_string()),
                            ],
                            Applicability::MaybeIncorrect,
                        );
                    }
                    err.emit()
                }
                self.cook_quoted(token::Char, Mode::Char, start, end, 1, 1)
            }
            rustc_lexer::LiteralKind::Byte { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start + BytePos(1), end),
                            "unterminated byte constant",
                        )
                        .with_code(E0763)
                        .emit()
                }
                self.cook_quoted(token::Byte, Mode::Byte, start, end, 2, 1)
            }
            rustc_lexer::LiteralKind::Str { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start, end),
                            "unterminated double quote string",
                        )
                        .with_code(E0765)
                        .emit()
                }
                self.cook_quoted(token::Str, Mode::Str, start, end, 1, 1)
            }
            rustc_lexer::LiteralKind::ByteStr { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start + BytePos(1), end),
                            "unterminated double quote byte string",
                        )
                        .with_code(E0766)
                        .emit()
                }
                self.cook_quoted(token::ByteStr, Mode::ByteStr, start, end, 2, 1)
            }
            rustc_lexer::LiteralKind::CStr { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start + BytePos(1), end),
                            "unterminated C string",
                        )
                        .with_code(E0767)
                        .emit()
                }
                self.cook_quoted(token::CStr, Mode::CStr, start, end, 2, 1)
            }
            rustc_lexer::LiteralKind::RawStr { n_hashes } => {
                if let Some(n_hashes) = n_hashes {
                    let n = u32::from(n_hashes);
                    let kind = token::StrRaw(n_hashes);
                    self.cook_quoted(kind, Mode::RawStr, start, end, 2 + n, 1 + n)
                } else {
                    self.report_raw_str_error(start, 1);
                }
            }
            rustc_lexer::LiteralKind::RawByteStr { n_hashes } => {
                if let Some(n_hashes) = n_hashes {
                    let n = u32::from(n_hashes);
                    let kind = token::ByteStrRaw(n_hashes);
                    self.cook_quoted(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n)
                } else {
                    self.report_raw_str_error(start, 2);
                }
            }
            rustc_lexer::LiteralKind::RawCStr { n_hashes } => {
                if let Some(n_hashes) = n_hashes {
                    let n = u32::from(n_hashes);
                    let kind = token::CStrRaw(n_hashes);
                    self.cook_quoted(kind, Mode::RawCStr, start, end, 3 + n, 1 + n)
                } else {
                    self.report_raw_str_error(start, 2);
                }
            }
            rustc_lexer::LiteralKind::Int { base, empty_int } => {
                let mut kind = token::Integer;
                if empty_int {
                    let span = self.mk_sp(start, end);
                    let guar = self.dcx().emit_err(errors::NoDigitsLiteral { span });
                    kind = token::Err(guar);
                } else if matches!(base, Base::Binary | Base::Octal) {
                    let base = base as u32;
                    let s = self.str_from_to(start + BytePos(2), end);
                    for (idx, c) in s.char_indices() {
                        let span = self.mk_sp(
                            start + BytePos::from_usize(2 + idx),
                            start + BytePos::from_usize(2 + idx + c.len_utf8()),
                        );
                        if c != '_' && c.to_digit(base).is_none() {
                            let guar =
                                self.dcx().emit_err(errors::InvalidDigitLiteral { span, base });
                            kind = token::Err(guar);
                        }
                    }
                }
                (kind, self.symbol_from_to(start, end))
            }
            rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
                let mut kind = token::Float;
                if empty_exponent {
                    let span = self.mk_sp(start, self.pos);
                    let guar = self.dcx().emit_err(errors::EmptyExponentFloat { span });
                    kind = token::Err(guar);
                }
                let base = match base {
                    Base::Hexadecimal => Some("hexadecimal"),
                    Base::Octal => Some("octal"),
                    Base::Binary => Some("binary"),
                    _ => None,
                };
                if let Some(base) = base {
                    let span = self.mk_sp(start, end);
                    let guar =
                        self.dcx().emit_err(errors::FloatLiteralUnsupportedBase { span, base });
                    kind = token::Err(guar)
                }
                (kind, self.symbol_from_to(start, end))
            }
        }
    }

    #[inline]
    fn src_index(&self, pos: BytePos) -> usize {
        (pos - self.start_pos).to_usize()
    }

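    /// Slice of the source text from `start` up to but excluding `self.pos`.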
    fn str_from(&self, start: BytePos) -> &'src str {
        self.str_from_to(start, self.pos)
    }

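    /// Interns the source text between `start` and `end` as a `Symbol`.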
    fn symbol_from_to(&self, start: BytePos, end: BytePos) -> Symbol {
        debug!("taking an ident from {:?} to {:?}", start, end);
        Symbol::intern(self.str_from_to(start, end))
    }

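    /// Slice of the source text spanning from `start` up to but excluding `end`.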
    fn str_from_to(&self, start: BytePos, end: BytePos) -> &'src str {
        &self.src[self.src_index(start)..self.src_index(end)]
    }

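    /// Slice of the source text spanning from `start` until the end.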
    fn str_from_to_end(&self, start: BytePos) -> &'src str {
        &self.src[self.src_index(start)..]
    }

    fn report_raw_str_error(&self, start: BytePos, prefix_len: u32) -> ! {
        match rustc_lexer::validate_raw_str(self.str_from(start), prefix_len) {
            Err(RawStrError::InvalidStarter { bad_char }) => {
                self.report_non_started_raw_string(start, bad_char)
            }
            Err(RawStrError::NoTerminator { expected, found, possible_terminator_offset }) => self
                .report_unterminated_raw_string(start, expected, possible_terminator_offset, found),
            Err(RawStrError::TooManyDelimiters { found }) => {
                self.report_too_many_hashes(start, found)
            }
            Ok(()) => panic!("no error found for supposedly invalid raw string literal"),
        }
    }

    fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! {
        self.dcx()
            .struct_span_fatal(
                self.mk_sp(start, self.pos),
                format!(
                    "found invalid character; only `#` is allowed in raw string delimitation: {}",
                    escaped_char(bad_char)
                ),
            )
            .emit()
    }

    fn report_unterminated_raw_string(
        &self,
        start: BytePos,
        n_hashes: u32,
        possible_offset: Option<u32>,
        found_terminators: u32,
    ) -> ! {
        let mut err =
            self.dcx().struct_span_fatal(self.mk_sp(start, start), "unterminated raw string");
        err.code(E0748);
        err.span_label(self.mk_sp(start, start), "unterminated raw string");

        if n_hashes > 0 {
            err.note(format!(
                "this raw string should be terminated with `\"{}`",
                "#".repeat(n_hashes as usize)
            ));
        }

        if let Some(possible_offset) = possible_offset {
            let lo = start + BytePos(possible_offset);
            let hi = lo + BytePos(found_terminators);
            let span = self.mk_sp(lo, hi);
            err.span_suggestion(
                span,
                "consider terminating the string here",
                "#".repeat(n_hashes as usize),
                Applicability::MaybeIncorrect,
            );
        }

        err.emit()
    }

    fn report_unterminated_block_comment(&self, start: BytePos, doc_style: Option<DocStyle>) {
        let msg = match doc_style {
            Some(_) => "unterminated block doc-comment",
            None => "unterminated block comment",
        };
        let last_bpos = self.pos;
        let mut err = self.dcx().struct_span_fatal(self.mk_sp(start, last_bpos), msg);
        err.code(E0758);
        let mut nested_block_comment_open_idxs = vec![];
        let mut last_nested_block_comment_idxs = None;
        let mut content_chars = self.str_from(start).char_indices().peekable();

        while let Some((idx, current_char)) = content_chars.next() {
            match content_chars.peek() {
                Some((_, '*')) if current_char == '/' => {
                    nested_block_comment_open_idxs.push(idx);
                }
                Some((_, '/')) if current_char == '*' => {
                    last_nested_block_comment_idxs =
                        nested_block_comment_open_idxs.pop().map(|open_idx| (open_idx, idx));
                }
                _ => {}
            };
        }

        if let Some((nested_open_idx, nested_close_idx)) = last_nested_block_comment_idxs {
            err.span_label(self.mk_sp(start, start + BytePos(2)), msg)
                .span_label(
                    self.mk_sp(
                        start + BytePos(nested_open_idx as u32),
                        start + BytePos(nested_open_idx as u32 + 2),
                    ),
                    "...as last nested comment starts here, maybe you want to close this instead?",
                )
                .span_label(
                    self.mk_sp(
                        start + BytePos(nested_close_idx as u32),
                        start + BytePos(nested_close_idx as u32 + 2),
                    ),
                    "...and last nested comment terminates here.",
                );
        }

        err.emit();
    }

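    /// Reports a prefix that is not a known literal prefix. From edition 2021 on this is a hard
    /// error; in earlier editions it only buffers a compatibility lint.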
    fn report_unknown_prefix(&self, start: BytePos) {
        let prefix_span = self.mk_sp(start, self.pos);
        let prefix = self.str_from_to(start, self.pos);
        let expn_data = prefix_span.ctxt().outer_expn_data();

        if expn_data.edition.at_least_rust_2021() {
            // In Rust 2021 and later, this is a hard error.
            let sugg = if prefix == "rb" {
                Some(errors::UnknownPrefixSugg::UseBr(prefix_span))
            } else if prefix == "rc" {
                Some(errors::UnknownPrefixSugg::UseCr(prefix_span))
            } else if expn_data.is_root() {
                if self.cursor.first() == '\''
                    && let Some(start) = self.last_lifetime
                    && self.cursor.third() != '\''
                    && let end = self.mk_sp(self.pos, self.pos + BytePos(1))
                    && !self.psess.source_map().is_multiline(start.until(end))
                {
                    // The prefix sits between a lifetime-like `'` and another `'` on the same
                    // line, so the user most likely meant a string literal.
                    Some(errors::UnknownPrefixSugg::MeantStr { start, end })
                } else {
                    Some(errors::UnknownPrefixSugg::Whitespace(prefix_span.shrink_to_hi()))
                }
            } else {
                None
            };
            self.dcx().emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
        } else {
            // Before Rust 2021, only buffer a migration lint.
            self.psess.buffer_lint(
                RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                prefix_span,
                ast::CRATE_NODE_ID,
                BuiltinLintDiag::ReservedPrefix(prefix_span, prefix.to_string()),
            );
        }
    }

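    /// Handles a guarded-string prefix (`#"..."` or `##...`): in Rust 2024 and later this is
    /// reserved syntax and an error; in earlier editions it buffers a migration lint and
    /// re-lexes the leading `#` as a plain `Pound` token.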
    fn maybe_report_guarded_str(&mut self, start: BytePos, str_before: &'src str) -> TokenKind {
        let span = self.mk_sp(start, self.pos);
        let edition2024 = span.edition().at_least_rust_2024();

        let space_pos = start + BytePos(1);
        let space_span = self.mk_sp(space_pos, space_pos);

        let mut cursor = Cursor::new(str_before, FrontmatterAllowed::No);

        let (is_string, span, unterminated) = match cursor.guarded_double_quoted_string() {
            Some(rustc_lexer::GuardedStr { n_hashes, terminated, token_len }) => {
                let end = start + BytePos(token_len);
                let span = self.mk_sp(start, end);
                let str_start = start + BytePos(n_hashes);

                if edition2024 {
                    self.cursor = cursor;
                    self.pos = end;
                }

                let unterminated = if terminated { None } else { Some(str_start) };

                (true, span, unterminated)
            }
            None => {
                // We should only get here in the `##+` case.
                debug_assert_eq!(self.str_from_to(start, start + BytePos(2)), "##");

                (false, span, None)
            }
        };
        if edition2024 {
            if let Some(str_start) = unterminated {
                // Only a fatal error if the string is unterminated.
                self.dcx()
                    .struct_span_fatal(
                        self.mk_sp(str_start, self.pos),
                        "unterminated double quote string",
                    )
                    .with_code(E0765)
                    .emit()
            }

            let sugg = if span.from_expansion() {
                None
            } else {
                Some(errors::GuardedStringSugg(space_span))
            };

            // In Rust 2024 and later, guarded strings are reserved syntax.
            let err = if is_string {
                self.dcx().emit_err(errors::ReservedString { span, sugg })
            } else {
                self.dcx().emit_err(errors::ReservedMultihash { span, sugg })
            };

            token::Literal(token::Lit {
                kind: token::Err(err),
                symbol: self.symbol_from_to(start, self.pos),
                suffix: None,
            })
        } else {
            // Before Rust 2024, only buffer a migration lint.
            self.psess.buffer_lint(
                RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
                span,
                ast::CRATE_NODE_ID,
                BuiltinLintDiag::ReservedString { is_string, suggestion: space_span },
            );

            // For backwards compatibility, roll back to just after the first `#` and return a
            // plain `Pound` token.
            self.pos = start + BytePos(1);
            self.cursor = Cursor::new(&str_before[1..], FrontmatterAllowed::No);
            token::Pound
        }
    }

    fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! {
        self.dcx().emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
    }

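    /// Cooks a quoted literal: strips the `prefix_len`/`postfix_len` delimiters, checks the
    /// contents for escape errors, and interns the result, downgrading the literal kind to
    /// `Err` if a fatal escape error was reported.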
    fn cook_quoted(
        &self,
        mut kind: token::LitKind,
        mode: Mode,
        start: BytePos,
        end: BytePos,
        prefix_len: u32,
        postfix_len: u32,
    ) -> (token::LitKind, Symbol) {
        let content_start = start + BytePos(prefix_len);
        let content_end = end - BytePos(postfix_len);
        let lit_content = self.str_from_to(content_start, content_end);
        check_for_errors(lit_content, mode, |range, err| {
            let span_with_quotes = self.mk_sp(start, end);
            let (start, end) = (range.start as u32, range.end as u32);
            let lo = content_start + BytePos(start);
            let hi = lo + BytePos(end - start);
            let span = self.mk_sp(lo, hi);
            let is_fatal = err.is_fatal();
            if let Some(guar) = emit_unescape_error(
                self.dcx(),
                lit_content,
                span_with_quotes,
                span,
                mode,
                range,
                err,
            ) {
                assert!(is_fatal);
                kind = token::Err(guar);
            }
        });

        // For valid literals, intern only the contents (without the delimiters); for erroneous
        // ones, keep the full text including the delimiters, which gives clearer errors later.
        let sym = if !matches!(kind, token::Err(_)) {
            Symbol::intern(lit_content)
        } else {
            self.symbol_from_to(start, end)
        };
        (kind, sym)
    }
}

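/// NFC-normalizes the given string and interns it as a `Symbol`, skipping the normalization
/// pass when the string is already in NFC.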
pub fn nfc_normalize(string: &str) -> Symbol {
    use unicode_normalization::{IsNormalized, UnicodeNormalization, is_nfc_quick};
    match is_nfc_quick(string.chars()) {
        IsNormalized::Yes => Symbol::intern(string),
        _ => {
            let normalized_str: String = string.chars().nfc().collect();
            Symbol::intern(&normalized_str)
        }
    }
}