use diagnostics::make_errors_for_mismatched_closing_delims;
use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::{TEXT_FLOW_CONTROL_CHARS, contains_text_flow_control_chars};
use rustc_errors::codes::*;
use rustc_errors::{Applicability, Diag, DiagCtxtHandle, StashKey};
use rustc_lexer::{
    Base, Cursor, DocStyle, FrontmatterAllowed, LiteralKind, RawStrError, is_horizontal_whitespace,
};
use rustc_literal_escaper::{EscapeError, Mode, check_for_errors};
use rustc_session::lint::BuiltinLintDiag;
use rustc_session::lint::builtin::{
    RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
    TEXT_DIRECTION_CODEPOINT_IN_COMMENT, TEXT_DIRECTION_CODEPOINT_IN_LITERAL,
};
use rustc_session::parse::ParseSess;
use rustc_span::{BytePos, Pos, Span, Symbol, sym};
use tracing::debug;

use crate::errors;
use crate::lexer::diagnostics::TokenTreeDiagInfo;
use crate::lexer::unicode_chars::UNICODE_ARRAY;

mod diagnostics;
mod tokentrees;
mod unescape_error_reporting;
mod unicode_chars;

use unescape_error_reporting::{emit_unescape_error, escaped_char};

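// `rustc_lexer::Token` is produced for every token in the source, so make sure
// it does not grow unintentionally.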
#[cfg(target_pointer_width = "64")]
rustc_data_structures::static_assert_size!(rustc_lexer::Token, 12);

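/// Describes a closing delimiter that could not be matched up with its opening
/// delimiter, used to build delimiter-mismatch diagnostics.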
#[derive(Clone, Debug)]
pub(crate) struct UnmatchedDelim {
    pub found_delim: Option<Delimiter>,
    pub found_span: Span,
    pub unclosed_span: Option<Span>,
    pub candidate_span: Option<Span>,
}

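/// Which leading tokens to strip from the source before lexing proper.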
pub enum StripTokens {
    /// Strip a leading shebang line and a frontmatter block, if present.
    ShebangAndFrontmatter,
    /// Strip only a leading shebang line, if present; frontmatter is not
    /// allowed.
    Shebang,
    /// Strip nothing; lex the source as-is.
    Nothing,
}

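/// Lexes `src` into a `TokenStream`, optionally stripping a shebang line
/// and/or a frontmatter block first according to `strip_tokens`. On failure,
/// returns the accumulated lexing and delimiter-mismatch errors.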
pub(crate) fn lex_token_trees<'psess, 'src>(
    psess: &'psess ParseSess,
    mut src: &'src str,
    mut start_pos: BytePos,
    override_span: Option<Span>,
    strip_tokens: StripTokens,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
    match strip_tokens {
        StripTokens::Shebang | StripTokens::ShebangAndFrontmatter => {
            if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
                src = &src[shebang_len..];
                start_pos = start_pos + BytePos::from_usize(shebang_len);
            }
        }
        StripTokens::Nothing => {}
    }

    let frontmatter_allowed = match strip_tokens {
        StripTokens::ShebangAndFrontmatter => FrontmatterAllowed::Yes,
        StripTokens::Shebang | StripTokens::Nothing => FrontmatterAllowed::No,
    };

    let cursor = Cursor::new(src, frontmatter_allowed);
    let mut lexer = Lexer {
        psess,
        start_pos,
        pos: start_pos,
        src,
        cursor,
        override_span,
        nbsp_is_whitespace: false,
        last_lifetime: None,
        token: Token::dummy(),
        diag_info: TokenTreeDiagInfo::default(),
    };
    let res = lexer.lex_token_trees(false);

    let mut unmatched_closing_delims: Vec<_> =
        make_errors_for_mismatched_closing_delims(&lexer.diag_info.unmatched_delims, psess);

    match res {
        Ok((_open_spacing, stream)) => {
            if unmatched_closing_delims.is_empty() {
                Ok(stream)
            } else {
                Err(unmatched_closing_delims)
            }
        }
        Err(errs) => {
            unmatched_closing_delims.extend(errs);
            Err(unmatched_closing_delims)
        }
    }
}

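/// Low-level lexer state that turns raw `rustc_lexer` tokens into cooked
/// `rustc_ast` tokens, emitting diagnostics along the way.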
struct Lexer<'psess, 'src> {
    psess: &'psess ParseSess,
    /// Position of the first character of `src` in the original source file.
    start_pos: BytePos,
    /// The absolute position of the character currently being lexed.
    pos: BytePos,
    /// The source text being lexed (after any shebang/frontmatter stripping).
    src: &'src str,
    /// Low-level cursor over `src`.
    cursor: Cursor<'src>,
    /// When set, every produced span is replaced with this one.
    override_span: Option<Span>,
    /// Set once a non-breaking space has been reported, so that later
    /// occurrences are treated as plain whitespace instead of re-reported.
    nbsp_is_whitespace: bool,

    /// Span of the most recently lexed lifetime token, used to improve
    /// diagnostics for mistyped char and string literals.
    last_lifetime: Option<Span>,

    /// The current token.
    token: Token,

    /// Diagnostic state shared with token-tree construction.
    diag_info: TokenTreeDiagInfo,
}

impl<'psess, 'src> Lexer<'psess, 'src> {
    fn dcx(&self) -> DiagCtxtHandle<'psess> {
        self.psess.dcx()
    }

    fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
        self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
    }

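    /// Lexes the next non-trivia token from the cursor and cooks it into a
    /// `rustc_ast` token. Returns the token together with a flag indicating
    /// whether it was preceded by whitespace (including skipped comments).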
    fn next_token_from_cursor(&mut self) -> (Token, bool) {
        let mut preceded_by_whitespace = false;
        let mut swallow_next_invalid = 0;
        // Skip trivia (whitespace and non-doc comments) until we reach a real
        // token, then cook and return it.
        loop {
            let str_before = self.cursor.as_str();
            let token = self.cursor.advance_token();
            let start = self.pos;
            self.pos = self.pos + BytePos(token.len);

            debug!("next_token: {:?}({:?})", token.kind, self.str_from(start));

            // These tokens make the `last_lifetime` hint (used for better
            // char-literal diagnostics) irrelevant, so clear it.
            if let rustc_lexer::TokenKind::Semi
            | rustc_lexer::TokenKind::LineComment { .. }
            | rustc_lexer::TokenKind::BlockComment { .. }
            | rustc_lexer::TokenKind::CloseParen
            | rustc_lexer::TokenKind::CloseBrace
            | rustc_lexer::TokenKind::CloseBracket = token.kind
            {
                self.last_lifetime = None;
            }

            let kind = match token.kind {
                rustc_lexer::TokenKind::LineComment { doc_style } => {
                    // Skip non-doc comments.
                    let Some(doc_style) = doc_style else {
                        self.lint_unicode_text_flow(start);
                        preceded_by_whitespace = true;
                        continue;
                    };

                    // The opening delimiter of the doc comment (`///` or `//!`) is 3 bytes.
                    let content_start = start + BytePos(3);
                    let content = self.str_from(content_start);
                    self.lint_doc_comment_unicode_text_flow(start, content);
                    self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style)
                }
                rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => {
                    if !terminated {
                        self.report_unterminated_block_comment(start, doc_style);
                    }

                    // Skip non-doc comments.
                    let Some(doc_style) = doc_style else {
                        self.lint_unicode_text_flow(start);
                        preceded_by_whitespace = true;
                        continue;
                    };

                    // The opening delimiter of the doc comment (`/**` or `/*!`) is 3 bytes;
                    // the closing `*/` is 2 bytes, if present.
                    let content_start = start + BytePos(3);
                    let content_end = self.pos - BytePos(if terminated { 2 } else { 0 });
                    let content = self.str_from_to(content_start, content_end);
                    self.lint_doc_comment_unicode_text_flow(start, content);
                    self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style)
                }
                rustc_lexer::TokenKind::Frontmatter {
                    has_invalid_preceding_whitespace,
                    invalid_infostring,
                } => {
                    self.validate_frontmatter(
                        start,
                        has_invalid_preceding_whitespace,
                        invalid_infostring,
                    );
                    preceded_by_whitespace = true;
                    continue;
                }
                rustc_lexer::TokenKind::Whitespace => {
                    preceded_by_whitespace = true;
                    continue;
                }
                rustc_lexer::TokenKind::Ident => self.ident(start),
                rustc_lexer::TokenKind::RawIdent => {
                    let sym = nfc_normalize(self.str_from(start + BytePos(2)));
                    let span = self.mk_sp(start, self.pos);
                    self.psess.symbol_gallery.insert(sym, span);
                    if !sym.can_be_raw() {
                        self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
                    }
                    self.psess.raw_identifier_spans.push(span);
                    token::Ident(sym, IdentIsRaw::Yes)
                }
                rustc_lexer::TokenKind::UnknownPrefix => {
                    self.report_unknown_prefix(start);
                    self.ident(start)
                }
                rustc_lexer::TokenKind::UnknownPrefixLifetime => {
                    self.report_unknown_prefix(start);
                    let lifetime_name = self.str_from(start);
                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));
                    let ident = Symbol::intern(lifetime_name);
                    token::Lifetime(ident, IdentIsRaw::No)
                }
                // A single confusable codepoint is handled by the `Unknown`/`InvalidIdent`
                // arm below via `unicode_chars::check_for_substitution`; only treat the
                // remaining cases as bad-unicode identifiers here.
                rustc_lexer::TokenKind::InvalidIdent
                    if !UNICODE_ARRAY.iter().any(|&(c, _, _)| {
                        let sym = self.str_from(start);
                        sym.chars().count() == 1 && c == sym.chars().next().unwrap()
                    }) =>
                {
                    let sym = nfc_normalize(self.str_from(start));
                    let span = self.mk_sp(start, self.pos);
                    self.psess
                        .bad_unicode_identifiers
                        .borrow_mut()
                        .entry(sym)
                        .or_default()
                        .push(span);
                    token::Ident(sym, IdentIsRaw::No)
                }
                // Before edition 2021, `c"…"` and `cr"…"` are not C string literals:
                // report the prefix as unknown and re-lex the quoted part separately.
                rustc_lexer::TokenKind::Literal {
                    kind: kind @ (LiteralKind::CStr { .. } | LiteralKind::RawCStr { .. }),
                    suffix_start: _,
                } if !self.mk_sp(start, self.pos).edition().at_least_rust_2021() => {
                    let prefix_len = match kind {
                        LiteralKind::CStr { .. } => 1,
                        LiteralKind::RawCStr { .. } => 2,
                        _ => unreachable!(),
                    };

                    // Reset the lexer to just after the prefix so the rest is lexed on
                    // the next call, and return the prefix as a plain identifier.
                    let lit_start = start + BytePos(prefix_len);
                    self.pos = lit_start;
                    self.cursor =
                        Cursor::new(&str_before[prefix_len as usize..], FrontmatterAllowed::No);
                    self.report_unknown_prefix(start);
                    let prefix_span = self.mk_sp(start, lit_start);
                    return (Token::new(self.ident(start), prefix_span), preceded_by_whitespace);
                }
                rustc_lexer::TokenKind::GuardedStrPrefix => {
                    self.maybe_report_guarded_str(start, str_before)
                }
                rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
                    let suffix_start = start + BytePos(suffix_start);
                    let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind);
                    let suffix = if suffix_start < self.pos {
                        let string = self.str_from(suffix_start);
                        if string == "_" {
                            self.dcx().emit_err(errors::UnderscoreLiteralSuffix {
                                span: self.mk_sp(suffix_start, self.pos),
                            });
                            None
                        } else {
                            Some(Symbol::intern(string))
                        }
                    } else {
                        None
                    };
                    self.lint_literal_unicode_text_flow(
                        symbol,
                        kind,
                        self.mk_sp(start, self.pos),
                        "literal",
                    );
                    token::Literal(token::Lit { kind, symbol, suffix })
                }
                rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
                    let lifetime_name = self.str_from(start);
                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));
                    if starts_with_number {
                        let span = self.mk_sp(start, self.pos);
                        self.dcx()
                            .struct_err("lifetimes cannot start with a number")
                            .with_span(span)
                            .stash(span, StashKey::LifetimeIsChar);
                    }
                    let ident = Symbol::intern(lifetime_name);
                    token::Lifetime(ident, IdentIsRaw::No)
                }
                rustc_lexer::TokenKind::RawLifetime => {
                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));

                    let ident_start = start + BytePos(3);
                    let prefix_span = self.mk_sp(start, ident_start);

                    if prefix_span.at_least_rust_2021() {
                        // A `'` immediately after the raw lifetime means the source looks
                        // like `'r#…'`; report it as a multi-character char literal.
                        if self.cursor.as_str().starts_with('\'') {
                            let lit_span = self.mk_sp(start, self.pos + BytePos(1));
                            let contents = self.str_from_to(start + BytePos(1), self.pos);
                            emit_unescape_error(
                                self.dcx(),
                                contents,
                                lit_span,
                                lit_span,
                                Mode::Char,
                                0..contents.len(),
                                EscapeError::MoreThanOneChar,
                            )
                            .expect("expected error");
                        }

                        let span = self.mk_sp(start, self.pos);

                        let lifetime_name_without_tick =
                            Symbol::intern(&self.str_from(ident_start));
                        if !lifetime_name_without_tick.can_be_raw() {
                            self.dcx().emit_err(errors::CannotBeRawLifetime {
                                span,
                                ident: lifetime_name_without_tick,
                            });
                        }

                        // Put the `'` back onto the lifetime name.
                        let mut lifetime_name =
                            String::with_capacity(lifetime_name_without_tick.as_str().len() + 1);
                        lifetime_name.push('\'');
                        lifetime_name += lifetime_name_without_tick.as_str();
                        let sym = Symbol::intern(&lifetime_name);

                        self.psess.raw_identifier_spans.push(span);

                        token::Lifetime(sym, IdentIsRaw::Yes)
                    } else {
                        // Before edition 2021 this is not lexed as a raw lifetime: lint
                        // and re-lex everything after the leading `'r` on later calls.
                        self.psess.buffer_lint(
                            RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                            prefix_span,
                            ast::CRATE_NODE_ID,
                            BuiltinLintDiag::RawPrefix(prefix_span),
                        );

                        // Reset the state so that only `'r` is consumed here.
                        let lt_start = start + BytePos(2);
                        self.pos = lt_start;
                        self.cursor = Cursor::new(&str_before[2..], FrontmatterAllowed::No);

                        let lifetime_name = self.str_from(start);
                        let ident = Symbol::intern(lifetime_name);
                        token::Lifetime(ident, IdentIsRaw::No)
                    }
                }
                rustc_lexer::TokenKind::Semi => token::Semi,
                rustc_lexer::TokenKind::Comma => token::Comma,
                rustc_lexer::TokenKind::Dot => token::Dot,
                rustc_lexer::TokenKind::OpenParen => token::OpenParen,
                rustc_lexer::TokenKind::CloseParen => token::CloseParen,
                rustc_lexer::TokenKind::OpenBrace => token::OpenBrace,
                rustc_lexer::TokenKind::CloseBrace => token::CloseBrace,
                rustc_lexer::TokenKind::OpenBracket => token::OpenBracket,
                rustc_lexer::TokenKind::CloseBracket => token::CloseBracket,
                rustc_lexer::TokenKind::At => token::At,
                rustc_lexer::TokenKind::Pound => token::Pound,
                rustc_lexer::TokenKind::Tilde => token::Tilde,
                rustc_lexer::TokenKind::Question => token::Question,
                rustc_lexer::TokenKind::Colon => token::Colon,
                rustc_lexer::TokenKind::Dollar => token::Dollar,
                rustc_lexer::TokenKind::Eq => token::Eq,
                rustc_lexer::TokenKind::Bang => token::Bang,
                rustc_lexer::TokenKind::Lt => token::Lt,
                rustc_lexer::TokenKind::Gt => token::Gt,
                rustc_lexer::TokenKind::Minus => token::Minus,
                rustc_lexer::TokenKind::And => token::And,
                rustc_lexer::TokenKind::Or => token::Or,
                rustc_lexer::TokenKind::Plus => token::Plus,
                rustc_lexer::TokenKind::Star => token::Star,
                rustc_lexer::TokenKind::Slash => token::Slash,
                rustc_lexer::TokenKind::Caret => token::Caret,
                rustc_lexer::TokenKind::Percent => token::Percent,

                rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
                    // Skip repeats of a character that has already been reported.
                    if swallow_next_invalid > 0 {
                        swallow_next_invalid -= 1;
                        continue;
                    }
                    let mut it = self.str_from_to_end(start).chars();
                    let c = it.next().unwrap();
                    if c == '\u{00a0}' {
                        // Report non-breaking spaces only once per file; afterwards treat
                        // them as ordinary whitespace.
                        if self.nbsp_is_whitespace {
                            preceded_by_whitespace = true;
                            continue;
                        }
                        self.nbsp_is_whitespace = true;
                    }
                    let repeats = it.take_while(|c1| *c1 == c).count();
                    // Check whether the unknown character is a confusable for an ASCII
                    // token and suggest the substitution if so.
                    let (token, sugg) =
                        unicode_chars::check_for_substitution(self, start, c, repeats + 1);
                    self.dcx().emit_err(errors::UnknownTokenStart {
                        span: self.mk_sp(start, self.pos + Pos::from_usize(repeats * c.len_utf8())),
                        escaped: escaped_char(c),
                        sugg,
                        null: if c == '\x00' { Some(errors::UnknownTokenNull) } else { None },
                        repeat: if repeats > 0 {
                            swallow_next_invalid = repeats;
                            Some(errors::UnknownTokenRepeat { repeats })
                        } else {
                            None
                        },
                    });

                    if let Some(token) = token {
                        token
                    } else {
                        preceded_by_whitespace = true;
                        continue;
                    }
                }
                rustc_lexer::TokenKind::Eof => token::Eof,
            };
            let span = self.mk_sp(start, self.pos);
            return (Token::new(kind, span), preceded_by_whitespace);
        }
    }

    fn ident(&self, start: BytePos) -> TokenKind {
        let sym = nfc_normalize(self.str_from(start));
        let span = self.mk_sp(start, self.pos);
        self.psess.symbol_gallery.insert(sym, span);
        token::Ident(sym, IdentIsRaw::No)
    }

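    /// Lints comments (other than doc comments) that contain unicode
    /// text-flow control characters.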
    fn lint_unicode_text_flow(&self, start: BytePos) {
        // The opening delimiter of the comment (`//` or `/*`) is 2 bytes.
        let content_start = start + BytePos(2);
        let content = self.str_from(content_start);
        if contains_text_flow_control_chars(content) {
            let span = self.mk_sp(start, self.pos);
            self.psess.buffer_lint(
                TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
                span,
                ast::CRATE_NODE_ID,
                BuiltinLintDiag::UnicodeTextFlow(span, content.to_string()),
            );
        }
    }

    fn lint_doc_comment_unicode_text_flow(&mut self, start: BytePos, content: &str) {
        if contains_text_flow_control_chars(content) {
            self.report_text_direction_codepoint(
                content,
                self.mk_sp(start, self.pos),
                0,
                false,
                "doc comment",
            );
        }
    }

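    /// Lints literals that contain unicode text-flow control characters,
    /// pointing at the characters themselves where the literal kind allows it.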
    fn lint_literal_unicode_text_flow(
        &mut self,
        text: Symbol,
        lit_kind: token::LitKind,
        span: Span,
        label: &'static str,
    ) {
        if !contains_text_flow_control_chars(text.as_str()) {
            return;
        }
        // `padding` is the number of bytes between the start of the literal token
        // and the start of its contents (prefix plus opening quote).
        let (padding, point_at_inner_spans) = match lit_kind {
            // `"` or `'`
            token::LitKind::Str | token::LitKind::Char => (1, true),
            // `c"`
            token::LitKind::CStr => (2, true),
            // `r`, `n` hashes, `"`
            token::LitKind::StrRaw(n) => (n as u32 + 2, true),
            // `cr`, `n` hashes, `"`
            token::LitKind::CStrRaw(n) => (n as u32 + 3, true),
            // A lexing error was already emitted for this literal.
            token::LitKind::Err(_) => return,
            // Everything else is only labeled as a whole.
            _ => (0, false),
        };
        self.report_text_direction_codepoint(
            text.as_str(),
            span,
            padding,
            point_at_inner_spans,
            label,
        );
    }

    fn report_text_direction_codepoint(
        &self,
        text: &str,
        span: Span,
        padding: u32,
        point_at_inner_spans: bool,
        label: &str,
    ) {
        // Collect the spans of all text-flow control characters in `text`.
        let spans: Vec<_> = text
            .char_indices()
            .filter_map(|(i, c)| {
                TEXT_FLOW_CONTROL_CHARS.contains(&c).then(|| {
                    let lo = span.lo() + BytePos(i as u32 + padding);
                    (c, span.with_lo(lo).with_hi(lo + BytePos(c.len_utf8() as u32)))
                })
            })
            .collect();

        let label = label.to_string();
        let count = spans.len();
        let labels = point_at_inner_spans
            .then_some(errors::HiddenUnicodeCodepointsDiagLabels { spans: spans.clone() });
        let sub = if point_at_inner_spans && !spans.is_empty() {
            errors::HiddenUnicodeCodepointsDiagSub::Escape { spans }
        } else {
            errors::HiddenUnicodeCodepointsDiagSub::NoEscape { spans }
        };

        self.psess.buffer_lint(
            TEXT_DIRECTION_CODEPOINT_IN_LITERAL,
            span,
            ast::CRATE_NODE_ID,
            errors::HiddenUnicodeCodepointsDiag { label, count, span_label: span, labels, sub },
        );
    }

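    /// Checks a frontmatter block produced by `rustc_lexer` for malformed
    /// opening and closing fences, an invalid infostring, and stray characters,
    /// and gates the syntax behind the `frontmatter` feature.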
    fn validate_frontmatter(
        &self,
        start: BytePos,
        has_invalid_preceding_whitespace: bool,
        invalid_infostring: bool,
    ) {
        let s = self.str_from(start);
        let real_start = s.find("---").unwrap();
        let frontmatter_opening_pos = BytePos(real_start as u32) + start;
        let s_new = &s[real_start..];
        let within = s_new.trim_start_matches('-');
        let len_opening = s_new.len() - within.len();

        let frontmatter_opening_end_pos = frontmatter_opening_pos + BytePos(len_opening as u32);
        if has_invalid_preceding_whitespace {
            let line_start =
                BytePos(s[..real_start].rfind("\n").map_or(0, |i| i as u32 + 1)) + start;
            let span = self.mk_sp(line_start, frontmatter_opening_end_pos);
            let label_span = self.mk_sp(line_start, frontmatter_opening_pos);
            self.dcx().emit_err(errors::FrontmatterInvalidOpeningPrecedingWhitespace {
                span,
                note_span: label_span,
            });
        }

        if invalid_infostring {
            let line_end = s[real_start..].find('\n').unwrap_or(s[real_start..].len());
            let span = self.mk_sp(
                frontmatter_opening_end_pos,
                frontmatter_opening_pos + BytePos(line_end as u32),
            );
            self.dcx().emit_err(errors::FrontmatterInvalidInfostring { span });
        }

        let last_line_start = within.rfind('\n').map_or(0, |i| i + 1);
        let last_line = &within[last_line_start..];
        let last_line_trimmed = last_line.trim_start_matches(is_horizontal_whitespace);
        let last_line_start_pos = frontmatter_opening_end_pos + BytePos(last_line_start as u32);

        let frontmatter_span = self.mk_sp(frontmatter_opening_pos, self.pos);
        self.psess.gated_spans.gate(sym::frontmatter, frontmatter_span);

        if !last_line_trimmed.starts_with("---") {
            let label_span = self.mk_sp(frontmatter_opening_pos, frontmatter_opening_end_pos);
            self.dcx().emit_err(errors::FrontmatterUnclosed {
                span: frontmatter_span,
                note_span: label_span,
            });
            return;
        }

        if last_line_trimmed.len() != last_line.len() {
            let line_end = last_line_start_pos + BytePos(last_line.len() as u32);
            let span = self.mk_sp(last_line_start_pos, line_end);
            let whitespace_end =
                last_line_start_pos + BytePos((last_line.len() - last_line_trimmed.len()) as u32);
            let label_span = self.mk_sp(last_line_start_pos, whitespace_end);
            self.dcx().emit_err(errors::FrontmatterInvalidClosingPrecedingWhitespace {
                span,
                note_span: label_span,
            });
        }

        let rest = last_line_trimmed.trim_start_matches('-');
        let len_close = last_line_trimmed.len() - rest.len();
        if len_close != len_opening {
            let span = self.mk_sp(frontmatter_opening_pos, self.pos);
            let opening = self.mk_sp(frontmatter_opening_pos, frontmatter_opening_end_pos);
            let last_line_close_pos = last_line_start_pos + BytePos(len_close as u32);
            let close = self.mk_sp(last_line_start_pos, last_line_close_pos);
            self.dcx().emit_err(errors::FrontmatterLengthMismatch {
                span,
                opening,
                close,
                len_opening,
                len_close,
            });
        }

        // The number of dashes in the opening fence must fit in a `u8`.
        if u8::try_from(len_opening).is_err() {
            self.dcx().emit_err(errors::FrontmatterTooManyDashes { len_opening });
        }

        if !rest.trim_matches(is_horizontal_whitespace).is_empty() {
            let span = self.mk_sp(last_line_start_pos, self.pos);
            self.dcx().emit_err(errors::FrontmatterExtraCharactersAfterClose { span });
        }
    }

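    /// Turns the contents of a doc comment into a `DocComment` token, rejecting
    /// bare carriage returns inside it.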
    fn cook_doc_comment(
        &self,
        content_start: BytePos,
        content: &str,
        comment_kind: CommentKind,
        doc_style: DocStyle,
    ) -> TokenKind {
        if content.contains('\r') {
            for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') {
                let span = self.mk_sp(
                    content_start + BytePos(idx as u32),
                    content_start + BytePos(idx as u32 + 1),
                );
                let block = matches!(comment_kind, CommentKind::Block);
                self.dcx().emit_err(errors::CrDocComment { span, block });
            }
        }

        let attr_style = match doc_style {
            DocStyle::Outer => AttrStyle::Outer,
            DocStyle::Inner => AttrStyle::Inner,
        };

        token::DocComment(comment_kind, attr_style, Symbol::intern(content))
    }

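    /// Cooks a `rustc_lexer` literal into a `rustc_ast` literal kind and symbol,
    /// emitting errors for unterminated or otherwise malformed literals.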
    fn cook_lexer_literal(
        &self,
        start: BytePos,
        end: BytePos,
        kind: rustc_lexer::LiteralKind,
    ) -> (token::LitKind, Symbol) {
        match kind {
            rustc_lexer::LiteralKind::Char { terminated } => {
                if !terminated {
                    let mut err = self
                        .dcx()
                        .struct_span_fatal(self.mk_sp(start, end), "unterminated character literal")
                        .with_code(E0762);
                    if let Some(lt_sp) = self.last_lifetime {
                        err.multipart_suggestion(
                            "if you meant to write a string literal, use double quotes",
                            vec![
                                (lt_sp, "\"".to_string()),
                                (self.mk_sp(start, start + BytePos(1)), "\"".to_string()),
                            ],
                            Applicability::MaybeIncorrect,
                        );
                    }
                    err.emit()
                }
                self.cook_quoted(token::Char, Mode::Char, start, end, 1, 1) // ' '
            }
            rustc_lexer::LiteralKind::Byte { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start + BytePos(1), end),
                            "unterminated byte constant",
                        )
                        .with_code(E0763)
                        .emit()
                }
                self.cook_quoted(token::Byte, Mode::Byte, start, end, 2, 1) // b' '
            }
            rustc_lexer::LiteralKind::Str { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start, end),
                            "unterminated double quote string",
                        )
                        .with_code(E0765)
                        .emit()
                }
                self.cook_quoted(token::Str, Mode::Str, start, end, 1, 1) // " "
            }
            rustc_lexer::LiteralKind::ByteStr { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start + BytePos(1), end),
                            "unterminated double quote byte string",
                        )
                        .with_code(E0766)
                        .emit()
                }
                self.cook_quoted(token::ByteStr, Mode::ByteStr, start, end, 2, 1) // b" "
            }
            rustc_lexer::LiteralKind::CStr { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start + BytePos(1), end),
                            "unterminated C string",
                        )
                        .with_code(E0767)
                        .emit()
                }
                self.cook_quoted(token::CStr, Mode::CStr, start, end, 2, 1) // c" "
            }
            rustc_lexer::LiteralKind::RawStr { n_hashes } => {
                if let Some(n_hashes) = n_hashes {
                    let n = u32::from(n_hashes);
                    let kind = token::StrRaw(n_hashes);
                    self.cook_quoted(kind, Mode::RawStr, start, end, 2 + n, 1 + n) // r##" "##
                } else {
                    self.report_raw_str_error(start, 1);
                }
            }
            rustc_lexer::LiteralKind::RawByteStr { n_hashes } => {
                if let Some(n_hashes) = n_hashes {
                    let n = u32::from(n_hashes);
                    let kind = token::ByteStrRaw(n_hashes);
                    self.cook_quoted(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n) // br##" "##
                } else {
                    self.report_raw_str_error(start, 2);
                }
            }
            rustc_lexer::LiteralKind::RawCStr { n_hashes } => {
                if let Some(n_hashes) = n_hashes {
                    let n = u32::from(n_hashes);
                    let kind = token::CStrRaw(n_hashes);
                    self.cook_quoted(kind, Mode::RawCStr, start, end, 3 + n, 1 + n) // cr##" "##
                } else {
                    self.report_raw_str_error(start, 2);
                }
            }
            rustc_lexer::LiteralKind::Int { base, empty_int } => {
                let mut kind = token::Integer;
                if empty_int {
                    let span = self.mk_sp(start, end);
                    let guar = self.dcx().emit_err(errors::NoDigitsLiteral { span });
                    kind = token::Err(guar);
                } else if matches!(base, Base::Binary | Base::Octal) {
                    let base = base as u32;
                    let s = self.str_from_to(start + BytePos(2), end);
                    for (idx, c) in s.char_indices() {
                        let span = self.mk_sp(
                            start + BytePos::from_usize(2 + idx),
                            start + BytePos::from_usize(2 + idx + c.len_utf8()),
                        );
                        if c != '_' && c.to_digit(base).is_none() {
                            let guar =
                                self.dcx().emit_err(errors::InvalidDigitLiteral { span, base });
                            kind = token::Err(guar);
                        }
                    }
                }
                (kind, self.symbol_from_to(start, end))
            }
            rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
                let mut kind = token::Float;
                if empty_exponent {
                    let span = self.mk_sp(start, self.pos);
                    let guar = self.dcx().emit_err(errors::EmptyExponentFloat { span });
                    kind = token::Err(guar);
                }
                let base = match base {
                    Base::Hexadecimal => Some("hexadecimal"),
                    Base::Octal => Some("octal"),
                    Base::Binary => Some("binary"),
                    _ => None,
                };
                if let Some(base) = base {
                    let span = self.mk_sp(start, end);
                    let guar =
                        self.dcx().emit_err(errors::FloatLiteralUnsupportedBase { span, base });
                    kind = token::Err(guar)
                }
                (kind, self.symbol_from_to(start, end))
            }
        }
    }

    #[inline]
    fn src_index(&self, pos: BytePos) -> usize {
        (pos - self.start_pos).to_usize()
    }

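    /// Slice of the source text spanning from `start` to the current position.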
    fn str_from(&self, start: BytePos) -> &'src str {
        self.str_from_to(start, self.pos)
    }

    /// As [`Self::str_from_to`], but interned as a symbol.
    fn symbol_from_to(&self, start: BytePos, end: BytePos) -> Symbol {
        debug!("taking an ident from {:?} to {:?}", start, end);
        Symbol::intern(self.str_from_to(start, end))
    }

    /// Slice of the source text spanning from `start` to `end`.
    fn str_from_to(&self, start: BytePos, end: BytePos) -> &'src str {
        &self.src[self.src_index(start)..self.src_index(end)]
    }

    /// Slice of the source text spanning from `start` to the end of the source.
    fn str_from_to_end(&self, start: BytePos) -> &'src str {
        &self.src[self.src_index(start)..]
    }

    fn report_raw_str_error(&self, start: BytePos, prefix_len: u32) -> ! {
        match rustc_lexer::validate_raw_str(self.str_from(start), prefix_len) {
            Err(RawStrError::InvalidStarter { bad_char }) => {
                self.report_non_started_raw_string(start, bad_char)
            }
            Err(RawStrError::NoTerminator { expected, found, possible_terminator_offset }) => self
                .report_unterminated_raw_string(start, expected, possible_terminator_offset, found),
            Err(RawStrError::TooManyDelimiters { found }) => {
                self.report_too_many_hashes(start, found)
            }
            Ok(()) => panic!("no error found for supposedly invalid raw string literal"),
        }
    }

    fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! {
        self.dcx()
            .struct_span_fatal(
                self.mk_sp(start, self.pos),
                format!(
                    "found invalid character; only `#` is allowed in raw string delimitation: {}",
                    escaped_char(bad_char)
                ),
            )
            .emit()
    }

    fn report_unterminated_raw_string(
        &self,
        start: BytePos,
        n_hashes: u32,
        possible_offset: Option<u32>,
        found_terminators: u32,
    ) -> ! {
        let mut err =
            self.dcx().struct_span_fatal(self.mk_sp(start, start), "unterminated raw string");
        err.code(E0748);
        err.span_label(self.mk_sp(start, start), "unterminated raw string");

        if n_hashes > 0 {
            err.note(format!(
                "this raw string should be terminated with `\"{}`",
                "#".repeat(n_hashes as usize)
            ));
        }

        if let Some(possible_offset) = possible_offset {
            let lo = start + BytePos(possible_offset);
            let hi = lo + BytePos(found_terminators);
            let span = self.mk_sp(lo, hi);
            err.span_suggestion(
                span,
                "consider terminating the string here",
                "#".repeat(n_hashes as usize),
                Applicability::MaybeIncorrect,
            );
        }

        err.emit()
    }

    fn report_unterminated_block_comment(&self, start: BytePos, doc_style: Option<DocStyle>) {
        let msg = match doc_style {
            Some(_) => "unterminated block doc-comment",
            None => "unterminated block comment",
        };
        let last_bpos = self.pos;
        let mut err = self.dcx().struct_span_fatal(self.mk_sp(start, last_bpos), msg);
        err.code(E0758);
        let mut nested_block_comment_open_idxs = vec![];
        let mut last_nested_block_comment_idxs = None;
        let mut content_chars = self.str_from(start).char_indices().peekable();

        while let Some((idx, current_char)) = content_chars.next() {
            match content_chars.peek() {
                Some((_, '*')) if current_char == '/' => {
                    nested_block_comment_open_idxs.push(idx);
                }
                Some((_, '/')) if current_char == '*' => {
                    last_nested_block_comment_idxs =
                        nested_block_comment_open_idxs.pop().map(|open_idx| (open_idx, idx));
                }
                _ => {}
            };
        }

        if let Some((nested_open_idx, nested_close_idx)) = last_nested_block_comment_idxs {
            err.span_label(self.mk_sp(start, start + BytePos(2)), msg)
                .span_label(
                    self.mk_sp(
                        start + BytePos(nested_open_idx as u32),
                        start + BytePos(nested_open_idx as u32 + 2),
                    ),
                    "...as last nested comment starts here, maybe you want to close this instead?",
                )
                .span_label(
                    self.mk_sp(
                        start + BytePos(nested_close_idx as u32),
                        start + BytePos(nested_close_idx as u32 + 2),
                    ),
                    "...and last nested comment terminates here.",
                );
        }

        err.emit();
    }

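    /// Reports a reserved prefix such as `foo#`, `foo'`, or `foo"`: a hard
    /// error in Rust 2021 and later (with suggestions where possible), and a
    /// forward-compatibility lint in earlier editions.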
    fn report_unknown_prefix(&self, start: BytePos) {
        let prefix_span = self.mk_sp(start, self.pos);
        let prefix = self.str_from_to(start, self.pos);
        let expn_data = prefix_span.ctxt().outer_expn_data();

        if expn_data.edition.at_least_rust_2021() {
            // In Rust 2021 and later, this is a hard error.
            let sugg = if prefix == "rb" {
                Some(errors::UnknownPrefixSugg::UseBr(prefix_span))
            } else if prefix == "rc" {
                Some(errors::UnknownPrefixSugg::UseCr(prefix_span))
            } else if expn_data.is_root() {
                if self.cursor.first() == '\''
                    && let Some(start) = self.last_lifetime
                    && self.cursor.third() != '\''
                    && let end = self.mk_sp(self.pos, self.pos + BytePos(1))
                    && !self.psess.source_map().is_multiline(start.until(end))
                {
                    // This looks like a string literal mistakenly written with single
                    // quotes; suggest switching to double quotes.
                    Some(errors::UnknownPrefixSugg::MeantStr { start, end })
                } else {
                    Some(errors::UnknownPrefixSugg::Whitespace(prefix_span.shrink_to_hi()))
                }
            } else {
                None
            };
            self.dcx().emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
        } else {
            // Before Rust 2021, only a lint is emitted.
            self.psess.buffer_lint(
                RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                prefix_span,
                ast::CRATE_NODE_ID,
                BuiltinLintDiag::ReservedPrefix(prefix_span, prefix.to_string()),
            );
        }
    }

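    /// Handles a guarded string prefix (`#"` or `##`), which is reserved
    /// syntax: emits an error in Rust 2024 and later, and a migration lint
    /// plus a plain `#` token in earlier editions.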
    fn maybe_report_guarded_str(&mut self, start: BytePos, str_before: &'src str) -> TokenKind {
        let span = self.mk_sp(start, self.pos);
        let edition2024 = span.edition().at_least_rust_2024();

        let space_pos = start + BytePos(1);
        let space_span = self.mk_sp(space_pos, space_pos);

        let mut cursor = Cursor::new(str_before, FrontmatterAllowed::No);

        let (is_string, span, unterminated) = match cursor.guarded_double_quoted_string() {
            Some(rustc_lexer::GuardedStr { n_hashes, terminated, token_len }) => {
                let end = start + BytePos(token_len);
                let span = self.mk_sp(start, end);
                let str_start = start + BytePos(n_hashes);

                // In Rust 2024, the guarded string is consumed as a single token.
                if edition2024 {
                    self.cursor = cursor;
                    self.pos = end;
                }

                let unterminated = if terminated { None } else { Some(str_start) };

                (true, span, unterminated)
            }
            None => {
                // No guarded string found; the token starts with `##`.
                debug_assert_eq!(self.str_from_to(start, start + BytePos(2)), "##");

                (false, span, None)
            }
        };
        if edition2024 {
            if let Some(str_start) = unterminated {
                // An unterminated string is a fatal error.
                self.dcx()
                    .struct_span_fatal(
                        self.mk_sp(str_start, self.pos),
                        "unterminated double quote string",
                    )
                    .with_code(E0765)
                    .emit()
            }

            let sugg = if span.from_expansion() {
                None
            } else {
                Some(errors::GuardedStringSugg(space_span))
            };

            let err = if is_string {
                self.dcx().emit_err(errors::ReservedString { span, sugg })
            } else {
                self.dcx().emit_err(errors::ReservedMultihash { span, sugg })
            };

            token::Literal(token::Lit {
                kind: token::Err(err),
                symbol: self.symbol_from_to(start, self.pos),
                suffix: None,
            })
        } else {
            // Before Rust 2024, emit a migration lint and lex only the leading `#`;
            // the rest of the input is re-lexed on later calls.
            self.psess.buffer_lint(
                RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
                span,
                ast::CRATE_NODE_ID,
                BuiltinLintDiag::ReservedString { is_string, suggestion: space_span },
            );

            // Reset the state so that only the `#` is consumed.
            self.pos = start + BytePos(1);
            self.cursor = Cursor::new(&str_before[1..], FrontmatterAllowed::No);
            token::Pound
        }
    }

    fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! {
        self.dcx().emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
    }

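    /// Cooks a quoted literal: strips the prefix and delimiters, checks the
    /// contents for escape errors, and returns the resulting literal kind and
    /// symbol.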
    fn cook_quoted(
        &self,
        mut kind: token::LitKind,
        mode: Mode,
        start: BytePos,
        end: BytePos,
        prefix_len: u32,
        postfix_len: u32,
    ) -> (token::LitKind, Symbol) {
        let content_start = start + BytePos(prefix_len);
        let content_end = end - BytePos(postfix_len);
        let lit_content = self.str_from_to(content_start, content_end);
        check_for_errors(lit_content, mode, |range, err| {
            let span_with_quotes = self.mk_sp(start, end);
            let (start, end) = (range.start as u32, range.end as u32);
            let lo = content_start + BytePos(start);
            let hi = lo + BytePos(end - start);
            let span = self.mk_sp(lo, hi);
            let is_fatal = err.is_fatal();
            if let Some(guar) = emit_unescape_error(
                self.dcx(),
                lit_content,
                span_with_quotes,
                span,
                mode,
                range,
                err,
            ) {
                assert!(is_fatal);
                kind = token::Err(guar);
            }
        });

        // If there was an error, intern the full literal text including the
        // delimiters; otherwise intern just the contents.
        let sym = if !matches!(kind, token::Err(_)) {
            Symbol::intern(lit_content)
        } else {
            self.symbol_from_to(start, end)
        };
        (kind, sym)
    }
}

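/// NFC-normalizes `string` and interns it, skipping the normalization pass
/// when a quick check shows the string is already in NFC.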
pub fn nfc_normalize(string: &str) -> Symbol {
    use unicode_normalization::{IsNormalized, UnicodeNormalization, is_nfc_quick};
    match is_nfc_quick(string.chars()) {
        IsNormalized::Yes => Symbol::intern(string),
        _ => {
            let normalized_str: String = string.chars().nfc().collect();
            Symbol::intern(&normalized_str)
        }
    }
}