use std::ops::Range;

use rustc_ast::ast::{self, AttrStyle};
use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::util::unicode::contains_text_flow_control_chars;
use rustc_errors::codes::*;
use rustc_errors::{Applicability, Diag, DiagCtxtHandle, StashKey};
use rustc_lexer::unescape::{self, EscapeError, Mode};
use rustc_lexer::{Base, Cursor, DocStyle, LiteralKind, RawStrError};
use rustc_session::lint::BuiltinLintDiag;
use rustc_session::lint::builtin::{
    RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX, RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
    TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
};
use rustc_session::parse::ParseSess;
use rustc_span::{BytePos, Pos, Span, Symbol};
use tracing::debug;

use crate::lexer::diagnostics::TokenTreeDiagInfo;
use crate::lexer::unicode_chars::UNICODE_ARRAY;
use crate::{errors, make_unclosed_delims_error};

mod diagnostics;
mod tokentrees;
mod unescape_error_reporting;
mod unicode_chars;

use unescape_error_reporting::{emit_unescape_error, escaped_char};

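// `rustc_lexer::Token` is produced for every lexeme in the hot lexing loop, so
// assert it stays at 12 bytes on 64-bit targets to catch accidental growth.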
#[cfg(target_pointer_width = "64")]
rustc_data_structures::static_assert_size!(rustc_lexer::Token, 12);

#[derive(Clone, Debug)]
pub(crate) struct UnmatchedDelim {
    pub found_delim: Option<Delimiter>,
    pub found_span: Span,
    pub unclosed_span: Option<Span>,
    pub candidate_span: Option<Span>,
}

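/// Lexes `src` into a `TokenStream`, skipping a leading shebang line if one is
/// present. On failure, returns the accumulated diagnostics, including any
/// unmatched-delimiter errors found while building token trees.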
pub(crate) fn lex_token_trees<'psess, 'src>(
    psess: &'psess ParseSess,
    mut src: &'src str,
    mut start_pos: BytePos,
    override_span: Option<Span>,
) -> Result<TokenStream, Vec<Diag<'psess>>> {
    if let Some(shebang_len) = rustc_lexer::strip_shebang(src) {
        src = &src[shebang_len..];
        start_pos = start_pos + BytePos::from_usize(shebang_len);
    }

    let cursor = Cursor::new(src);
    let mut lexer = Lexer {
        psess,
        start_pos,
        pos: start_pos,
        src,
        cursor,
        override_span,
        nbsp_is_whitespace: false,
        last_lifetime: None,
        token: Token::dummy(),
        diag_info: TokenTreeDiagInfo::default(),
    };
    let res = lexer.lex_token_trees(false);

    let mut unmatched_delims: Vec<_> = lexer
        .diag_info
        .unmatched_delims
        .into_iter()
        .filter_map(|unmatched_delim| make_unclosed_delims_error(unmatched_delim, psess))
        .collect();

    match res {
        Ok((_open_spacing, stream)) => {
            if unmatched_delims.is_empty() {
                Ok(stream)
            } else {
                Err(unmatched_delims)
            }
        }
        Err(errs) => {
            unmatched_delims.extend(errs);
            Err(unmatched_delims)
        }
    }
}

struct Lexer<'psess, 'src> {
    psess: &'psess ParseSess,
    /// Initial position, read-only.
    start_pos: BytePos,
    /// The absolute offset of the current character within the source map.
    pos: BytePos,
    /// Source text to tokenize.
    src: &'src str,
    /// Cursor for getting lexer tokens.
    cursor: Cursor<'src>,
    /// If present, every span produced by this lexer is replaced with it.
    override_span: Option<Span>,
    /// When an "unknown start of token: \u{a0}" error has already been emitted
    /// for this file, further occurrences of the non-breaking space character
    /// are treated as plain whitespace.
    nbsp_is_whitespace: bool,

    /// Track the `Span` of the leading `'` of the last lifetime. Used by
    /// diagnostics to see whether certain follow-up errors are caused by a `'`
    /// being treated as the opening quote of a character literal.
    last_lifetime: Option<Span>,

    /// The current token.
    token: Token,

    /// Diagnostic information gathered while constructing token trees.
    diag_info: TokenTreeDiagInfo,
}

impl<'psess, 'src> Lexer<'psess, 'src> {
    fn dcx(&self) -> DiagCtxtHandle<'psess> {
        self.psess.dcx()
    }

    fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
        self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
    }

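    /// Returns the next token from the source, together with a `bool`
    /// indicating whether the token was preceded by whitespace.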
    fn next_token_from_cursor(&mut self) -> (Token, bool) {
        let mut preceded_by_whitespace = false;
        let mut swallow_next_invalid = 0;
        // Skip trivial (whitespace & comment) tokens.
        loop {
            let str_before = self.cursor.as_str();
            let token = self.cursor.advance_token();
            let start = self.pos;
            self.pos = self.pos + BytePos(token.len);

            debug!("next_token: {:?}({:?})", token.kind, self.str_from(start));

            if let rustc_lexer::TokenKind::Semi
            | rustc_lexer::TokenKind::LineComment { .. }
            | rustc_lexer::TokenKind::BlockComment { .. }
            | rustc_lexer::TokenKind::CloseParen
            | rustc_lexer::TokenKind::CloseBrace
            | rustc_lexer::TokenKind::CloseBracket = token.kind
            {
                // Heuristic: we assume that it is unlikely we're dealing with an unterminated
                // string surrounded by single quotes.
                self.last_lifetime = None;
            }

            let kind = match token.kind {
                rustc_lexer::TokenKind::LineComment { doc_style } => {
                    // Skip non-doc comments.
                    let Some(doc_style) = doc_style else {
                        self.lint_unicode_text_flow(start);
                        preceded_by_whitespace = true;
                        continue;
                    };

                    // The opening delimiter (`///` or `//!`, length 3) is not
                    // included in the symbol.
                    let content_start = start + BytePos(3);
                    let content = self.str_from(content_start);
                    self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style)
                }
                rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => {
                    if !terminated {
                        self.report_unterminated_block_comment(start, doc_style);
                    }

                    // Skip non-doc comments.
                    let Some(doc_style) = doc_style else {
                        self.lint_unicode_text_flow(start);
                        preceded_by_whitespace = true;
                        continue;
                    };

                    // The opening delimiter (length 3) and closing delimiter
                    // (length 2) are not included in the symbol.
                    let content_start = start + BytePos(3);
                    let content_end = self.pos - BytePos(if terminated { 2 } else { 0 });
                    let content = self.str_from_to(content_start, content_end);
                    self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style)
                }
                rustc_lexer::TokenKind::Whitespace => {
                    preceded_by_whitespace = true;
                    continue;
                }
                rustc_lexer::TokenKind::Ident => self.ident(start),
                rustc_lexer::TokenKind::RawIdent => {
                    let sym = nfc_normalize(self.str_from(start + BytePos(2)));
                    let span = self.mk_sp(start, self.pos);
                    self.psess.symbol_gallery.insert(sym, span);
                    if !sym.can_be_raw() {
                        self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym });
                    }
                    self.psess.raw_identifier_spans.push(span);
                    token::Ident(sym, IdentIsRaw::Yes)
                }
                rustc_lexer::TokenKind::UnknownPrefix => {
                    self.report_unknown_prefix(start);
                    self.ident(start)
                }
                rustc_lexer::TokenKind::UnknownPrefixLifetime => {
                    self.report_unknown_prefix(start);
                    let lifetime_name = self.str_from(start);
                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));
                    let ident = Symbol::intern(lifetime_name);
                    token::Lifetime(ident, IdentIsRaw::No)
                }
                rustc_lexer::TokenKind::InvalidIdent
                    // Only use this recovery when the codepoint is not listed
                    // in the confusables table handled by
                    // `check_for_substitution` below.
                    if !UNICODE_ARRAY.iter().any(|&(c, _, _)| {
                        let sym = self.str_from(start);
                        sym.chars().count() == 1 && c == sym.chars().next().unwrap()
                    }) =>
                {
                    let sym = nfc_normalize(self.str_from(start));
                    let span = self.mk_sp(start, self.pos);
                    self.psess
                        .bad_unicode_identifiers
                        .borrow_mut()
                        .entry(sym)
                        .or_default()
                        .push(span);
                    token::Ident(sym, IdentIsRaw::No)
                }
                // Split up (raw) C string literals into an ident and a string
                // literal when the edition is earlier than 2021.
                rustc_lexer::TokenKind::Literal {
                    kind: kind @ (LiteralKind::CStr { .. } | LiteralKind::RawCStr { .. }),
                    suffix_start: _,
                } if !self.mk_sp(start, self.pos).edition().at_least_rust_2021() => {
                    let prefix_len = match kind {
                        LiteralKind::CStr { .. } => 1,
                        LiteralKind::RawCStr { .. } => 2,
                        _ => unreachable!(),
                    };

                    // Reset the state so that only the prefix ("c" or "cr") was consumed.
                    let lit_start = start + BytePos(prefix_len);
                    self.pos = lit_start;
                    self.cursor = Cursor::new(&str_before[prefix_len as usize..]);

                    self.report_unknown_prefix(start);
                    let prefix_span = self.mk_sp(start, lit_start);
                    return (Token::new(self.ident(start), prefix_span), preceded_by_whitespace);
                }
                rustc_lexer::TokenKind::GuardedStrPrefix => {
                    self.maybe_report_guarded_str(start, str_before)
                }
                rustc_lexer::TokenKind::Literal { kind, suffix_start } => {
                    let suffix_start = start + BytePos(suffix_start);
                    let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind);
                    let suffix = if suffix_start < self.pos {
                        let string = self.str_from(suffix_start);
                        if string == "_" {
                            self.dcx().emit_err(errors::UnderscoreLiteralSuffix {
                                span: self.mk_sp(suffix_start, self.pos),
                            });
                            None
                        } else {
                            Some(Symbol::intern(string))
                        }
                    } else {
                        None
                    };
                    token::Literal(token::Lit { kind, symbol, suffix })
                }
                rustc_lexer::TokenKind::Lifetime { starts_with_number } => {
                    let lifetime_name = self.str_from(start);
                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));
                    if starts_with_number {
                        let span = self.mk_sp(start, self.pos);
                        self.dcx()
                            .struct_err("lifetimes cannot start with a number")
                            .with_span(span)
                            .stash(span, StashKey::LifetimeIsChar);
                    }
                    let ident = Symbol::intern(lifetime_name);
                    token::Lifetime(ident, IdentIsRaw::No)
                }
                rustc_lexer::TokenKind::RawLifetime => {
                    self.last_lifetime = Some(self.mk_sp(start, start + BytePos(1)));

                    let ident_start = start + BytePos(3);
                    let prefix_span = self.mk_sp(start, ident_start);

                    if prefix_span.at_least_rust_2021() {
                        // A raw lifetime immediately followed by `'` looks like a
                        // multi-character character literal; report it as such.
                        if self.cursor.as_str().starts_with('\'') {
                            let lit_span = self.mk_sp(start, self.pos + BytePos(1));
                            let contents = self.str_from_to(start + BytePos(1), self.pos);
                            emit_unescape_error(
                                self.dcx(),
                                contents,
                                lit_span,
                                lit_span,
                                Mode::Char,
                                0..contents.len(),
                                EscapeError::MoreThanOneChar,
                            )
                            .expect("expected error");
                        }

                        let span = self.mk_sp(start, self.pos);

                        let lifetime_name_without_tick =
                            Symbol::intern(&self.str_from(ident_start));
                        if !lifetime_name_without_tick.can_be_raw() {
                            self.dcx().emit_err(errors::CannotBeRawLifetime {
                                span,
                                ident: lifetime_name_without_tick,
                            });
                        }

                        // Put the `'` back onto the lifetime name.
                        let mut lifetime_name =
                            String::with_capacity(lifetime_name_without_tick.as_str().len() + 1);
                        lifetime_name.push('\'');
                        lifetime_name += lifetime_name_without_tick.as_str();
                        let sym = Symbol::intern(&lifetime_name);

                        // Make sure we mark this as a raw identifier.
                        self.psess.raw_identifier_spans.push(span);

                        token::Lifetime(sym, IdentIsRaw::Yes)
                    } else {
                        // Otherwise, this is parsed like the lifetime `'r`. Warn about it though.
                        self.psess.buffer_lint(
                            RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                            prefix_span,
                            ast::CRATE_NODE_ID,
                            BuiltinLintDiag::RawPrefix(prefix_span),
                        );

                        // Reset the state so that just the `'r` has been consumed.
                        let lt_start = start + BytePos(2);
                        self.pos = lt_start;
                        self.cursor = Cursor::new(&str_before[2..]);

                        let lifetime_name = self.str_from(start);
                        let ident = Symbol::intern(lifetime_name);
                        token::Lifetime(ident, IdentIsRaw::No)
                    }
                }
                rustc_lexer::TokenKind::Semi => token::Semi,
                rustc_lexer::TokenKind::Comma => token::Comma,
                rustc_lexer::TokenKind::Dot => token::Dot,
                rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
                rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
                rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
                rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
                rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
                rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
                rustc_lexer::TokenKind::At => token::At,
                rustc_lexer::TokenKind::Pound => token::Pound,
                rustc_lexer::TokenKind::Tilde => token::Tilde,
                rustc_lexer::TokenKind::Question => token::Question,
                rustc_lexer::TokenKind::Colon => token::Colon,
                rustc_lexer::TokenKind::Dollar => token::Dollar,
                rustc_lexer::TokenKind::Eq => token::Eq,
                rustc_lexer::TokenKind::Bang => token::Not,
                rustc_lexer::TokenKind::Lt => token::Lt,
                rustc_lexer::TokenKind::Gt => token::Gt,
                rustc_lexer::TokenKind::Minus => token::BinOp(token::Minus),
                rustc_lexer::TokenKind::And => token::BinOp(token::And),
                rustc_lexer::TokenKind::Or => token::BinOp(token::Or),
                rustc_lexer::TokenKind::Plus => token::BinOp(token::Plus),
                rustc_lexer::TokenKind::Star => token::BinOp(token::Star),
                rustc_lexer::TokenKind::Slash => token::BinOp(token::Slash),
                rustc_lexer::TokenKind::Caret => token::BinOp(token::Caret),
                rustc_lexer::TokenKind::Percent => token::BinOp(token::Percent),

                rustc_lexer::TokenKind::Unknown | rustc_lexer::TokenKind::InvalidIdent => {
                    // Don't emit diagnostics for sequences of the same invalid token.
                    if swallow_next_invalid > 0 {
                        swallow_next_invalid -= 1;
                        continue;
                    }
                    let mut it = self.str_from_to_end(start).chars();
                    let c = it.next().unwrap();
                    if c == '\u{00a0}' {
                        // If an error has already been reported for a non-breaking
                        // space earlier in this file, treat further occurrences as
                        // whitespace.
                        if self.nbsp_is_whitespace {
                            preceded_by_whitespace = true;
                            continue;
                        }
                        self.nbsp_is_whitespace = true;
                    }
                    let repeats = it.take_while(|c1| *c1 == c).count();
                    // Check whether the character is a confusable homoglyph for which an
                    // ASCII substitution can be suggested.
                    let (token, sugg) =
                        unicode_chars::check_for_substitution(self, start, c, repeats + 1);
                    self.dcx().emit_err(errors::UnknownTokenStart {
                        span: self.mk_sp(start, self.pos + Pos::from_usize(repeats * c.len_utf8())),
                        escaped: escaped_char(c),
                        sugg,
                        null: if c == '\x00' { Some(errors::UnknownTokenNull) } else { None },
                        repeat: if repeats > 0 {
                            swallow_next_invalid = repeats;
                            Some(errors::UnknownTokenRepeat { repeats })
                        } else {
                            None
                        },
                    });

                    if let Some(token) = token {
                        token
                    } else {
                        preceded_by_whitespace = true;
                        continue;
                    }
                }
                rustc_lexer::TokenKind::Eof => token::Eof,
            };
            let span = self.mk_sp(start, self.pos);
            return (Token::new(kind, span), preceded_by_whitespace);
        }
    }

    fn ident(&self, start: BytePos) -> TokenKind {
        let sym = nfc_normalize(self.str_from(start));
        let span = self.mk_sp(start, self.pos);
        self.psess.symbol_gallery.insert(sym, span);
        token::Ident(sym, IdentIsRaw::No)
    }

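    /// Detects Unicode codepoints that change the on-screen direction of text
    /// inside a non-doc comment, and buffers a lint covering the comment.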
    fn lint_unicode_text_flow(&self, start: BytePos) {
        // The opening delimiter (`//` or `/*`, length 2) is not part of the comment text.
        let content_start = start + BytePos(2);
        let content = self.str_from(content_start);
        if contains_text_flow_control_chars(content) {
            let span = self.mk_sp(start, self.pos);
            self.psess.buffer_lint(
                TEXT_DIRECTION_CODEPOINT_IN_COMMENT,
                span,
                ast::CRATE_NODE_ID,
                BuiltinLintDiag::UnicodeTextFlow(span, content.to_string()),
            );
        }
    }

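    /// Turns the contents of a doc comment into a `DocComment` token,
    /// rejecting bare CR characters along the way.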
    fn cook_doc_comment(
        &self,
        content_start: BytePos,
        content: &str,
        comment_kind: CommentKind,
        doc_style: DocStyle,
    ) -> TokenKind {
        if content.contains('\r') {
            for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') {
                let span = self.mk_sp(
                    content_start + BytePos(idx as u32),
                    content_start + BytePos(idx as u32 + 1),
                );
                let block = matches!(comment_kind, CommentKind::Block);
                self.dcx().emit_err(errors::CrDocComment { span, block });
            }
        }

        let attr_style = match doc_style {
            DocStyle::Outer => AttrStyle::Outer,
            DocStyle::Inner => AttrStyle::Inner,
        };

        token::DocComment(comment_kind, attr_style, Symbol::intern(content))
    }

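    /// Cooks a literal produced by `rustc_lexer` into a `token::LitKind` and
    /// its interned symbol, emitting errors for unterminated or otherwise
    /// malformed literals.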
    fn cook_lexer_literal(
        &self,
        start: BytePos,
        end: BytePos,
        kind: rustc_lexer::LiteralKind,
    ) -> (token::LitKind, Symbol) {
        match kind {
            rustc_lexer::LiteralKind::Char { terminated } => {
                if !terminated {
                    let mut err = self
                        .dcx()
                        .struct_span_fatal(self.mk_sp(start, end), "unterminated character literal")
                        .with_code(E0762);
                    if let Some(lt_sp) = self.last_lifetime {
                        err.multipart_suggestion(
                            "if you meant to write a string literal, use double quotes",
                            vec![
                                (lt_sp, "\"".to_string()),
                                (self.mk_sp(start, start + BytePos(1)), "\"".to_string()),
                            ],
                            Applicability::MaybeIncorrect,
                        );
                    }
                    err.emit()
                }
                self.cook_unicode(token::Char, Mode::Char, start, end, 1, 1) // ' '
            }
            rustc_lexer::LiteralKind::Byte { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start + BytePos(1), end),
                            "unterminated byte constant",
                        )
                        .with_code(E0763)
                        .emit()
                }
                self.cook_unicode(token::Byte, Mode::Byte, start, end, 2, 1) // b' '
            }
            rustc_lexer::LiteralKind::Str { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start, end),
                            "unterminated double quote string",
                        )
                        .with_code(E0765)
                        .emit()
                }
                self.cook_unicode(token::Str, Mode::Str, start, end, 1, 1) // " "
            }
            rustc_lexer::LiteralKind::ByteStr { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start + BytePos(1), end),
                            "unterminated double quote byte string",
                        )
                        .with_code(E0766)
                        .emit()
                }
                self.cook_unicode(token::ByteStr, Mode::ByteStr, start, end, 2, 1) // b" "
            }
            rustc_lexer::LiteralKind::CStr { terminated } => {
                if !terminated {
                    self.dcx()
                        .struct_span_fatal(
                            self.mk_sp(start + BytePos(1), end),
                            "unterminated C string",
                        )
                        .with_code(E0767)
                        .emit()
                }
                self.cook_mixed(token::CStr, Mode::CStr, start, end, 2, 1) // c" "
            }
            rustc_lexer::LiteralKind::RawStr { n_hashes } => {
                if let Some(n_hashes) = n_hashes {
                    let n = u32::from(n_hashes);
                    let kind = token::StrRaw(n_hashes);
                    self.cook_unicode(kind, Mode::RawStr, start, end, 2 + n, 1 + n) // r##" "##
                } else {
                    self.report_raw_str_error(start, 1);
                }
            }
            rustc_lexer::LiteralKind::RawByteStr { n_hashes } => {
                if let Some(n_hashes) = n_hashes {
                    let n = u32::from(n_hashes);
                    let kind = token::ByteStrRaw(n_hashes);
                    self.cook_unicode(kind, Mode::RawByteStr, start, end, 3 + n, 1 + n) // br##" "##
                } else {
                    self.report_raw_str_error(start, 2);
                }
            }
            rustc_lexer::LiteralKind::RawCStr { n_hashes } => {
                if let Some(n_hashes) = n_hashes {
                    let n = u32::from(n_hashes);
                    let kind = token::CStrRaw(n_hashes);
                    self.cook_unicode(kind, Mode::RawCStr, start, end, 3 + n, 1 + n) // cr##" "##
                } else {
                    self.report_raw_str_error(start, 2);
                }
            }
            rustc_lexer::LiteralKind::Int { base, empty_int } => {
                let mut kind = token::Integer;
                if empty_int {
                    let span = self.mk_sp(start, end);
                    let guar = self.dcx().emit_err(errors::NoDigitsLiteral { span });
                    kind = token::Err(guar);
                } else if matches!(base, Base::Binary | Base::Octal) {
                    let base = base as u32;
                    let s = self.str_from_to(start + BytePos(2), end);
                    for (idx, c) in s.char_indices() {
                        let span = self.mk_sp(
                            start + BytePos::from_usize(2 + idx),
                            start + BytePos::from_usize(2 + idx + c.len_utf8()),
                        );
                        if c != '_' && c.to_digit(base).is_none() {
                            let guar =
                                self.dcx().emit_err(errors::InvalidDigitLiteral { span, base });
                            kind = token::Err(guar);
                        }
                    }
                }
                (kind, self.symbol_from_to(start, end))
            }
            rustc_lexer::LiteralKind::Float { base, empty_exponent } => {
                let mut kind = token::Float;
                if empty_exponent {
                    let span = self.mk_sp(start, self.pos);
                    let guar = self.dcx().emit_err(errors::EmptyExponentFloat { span });
                    kind = token::Err(guar);
                }
                let base = match base {
                    Base::Hexadecimal => Some("hexadecimal"),
                    Base::Octal => Some("octal"),
                    Base::Binary => Some("binary"),
                    _ => None,
                };
                if let Some(base) = base {
                    let span = self.mk_sp(start, end);
                    let guar =
                        self.dcx().emit_err(errors::FloatLiteralUnsupportedBase { span, base });
                    kind = token::Err(guar)
                }
                (kind, self.symbol_from_to(start, end))
            }
        }
    }

    #[inline]
    fn src_index(&self, pos: BytePos) -> usize {
        (pos - self.start_pos).to_usize()
    }

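    /// Slice of the source text from `start` up to and excluding `self.pos`.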
    fn str_from(&self, start: BytePos) -> &'src str {
        self.str_from_to(start, self.pos)
    }

    /// Interns the source text spanning `start..end` as a `Symbol`.
    fn symbol_from_to(&self, start: BytePos, end: BytePos) -> Symbol {
        debug!("taking an ident from {:?} to {:?}", start, end);
        Symbol::intern(self.str_from_to(start, end))
    }

    /// Slice of the source text spanning from `start` up to and excluding `end`.
    fn str_from_to(&self, start: BytePos, end: BytePos) -> &'src str {
        &self.src[self.src_index(start)..self.src_index(end)]
    }

    /// Slice of the source text spanning from `start` until the end.
    fn str_from_to_end(&self, start: BytePos) -> &'src str {
        &self.src[self.src_index(start)..]
    }

    fn report_raw_str_error(&self, start: BytePos, prefix_len: u32) -> ! {
        match rustc_lexer::validate_raw_str(self.str_from(start), prefix_len) {
            Err(RawStrError::InvalidStarter { bad_char }) => {
                self.report_non_started_raw_string(start, bad_char)
            }
            Err(RawStrError::NoTerminator { expected, found, possible_terminator_offset }) => self
                .report_unterminated_raw_string(start, expected, possible_terminator_offset, found),
            Err(RawStrError::TooManyDelimiters { found }) => {
                self.report_too_many_hashes(start, found)
            }
            Ok(()) => panic!("no error found for supposedly invalid raw string literal"),
        }
    }

    fn report_non_started_raw_string(&self, start: BytePos, bad_char: char) -> ! {
        self.dcx()
            .struct_span_fatal(
                self.mk_sp(start, self.pos),
                format!(
                    "found invalid character; only `#` is allowed in raw string delimitation: {}",
                    escaped_char(bad_char)
                ),
            )
            .emit()
    }

    fn report_unterminated_raw_string(
        &self,
        start: BytePos,
        n_hashes: u32,
        possible_offset: Option<u32>,
        found_terminators: u32,
    ) -> ! {
        let mut err =
            self.dcx().struct_span_fatal(self.mk_sp(start, start), "unterminated raw string");
        err.code(E0748);
        err.span_label(self.mk_sp(start, start), "unterminated raw string");

        if n_hashes > 0 {
            err.note(format!(
                "this raw string should be terminated with `\"{}`",
                "#".repeat(n_hashes as usize)
            ));
        }

        if let Some(possible_offset) = possible_offset {
            let lo = start + BytePos(possible_offset);
            let hi = lo + BytePos(found_terminators);
            let span = self.mk_sp(lo, hi);
            err.span_suggestion(
                span,
                "consider terminating the string here",
                "#".repeat(n_hashes as usize),
                Applicability::MaybeIncorrect,
            );
        }

        err.emit()
    }

    fn report_unterminated_block_comment(&self, start: BytePos, doc_style: Option<DocStyle>) {
        let msg = match doc_style {
            Some(_) => "unterminated block doc-comment",
            None => "unterminated block comment",
        };
        let last_bpos = self.pos;
        let mut err = self.dcx().struct_span_fatal(self.mk_sp(start, last_bpos), msg);
        err.code(E0758);
        let mut nested_block_comment_open_idxs = vec![];
        let mut last_nested_block_comment_idxs = None;
        let mut content_chars = self.str_from(start).char_indices().peekable();

        while let Some((idx, current_char)) = content_chars.next() {
            match content_chars.peek() {
                Some((_, '*')) if current_char == '/' => {
                    nested_block_comment_open_idxs.push(idx);
                }
                Some((_, '/')) if current_char == '*' => {
                    last_nested_block_comment_idxs =
                        nested_block_comment_open_idxs.pop().map(|open_idx| (open_idx, idx));
                }
                _ => {}
            };
        }

        if let Some((nested_open_idx, nested_close_idx)) = last_nested_block_comment_idxs {
            err.span_label(self.mk_sp(start, start + BytePos(2)), msg)
                .span_label(
                    self.mk_sp(
                        start + BytePos(nested_open_idx as u32),
                        start + BytePos(nested_open_idx as u32 + 2),
                    ),
                    "...as last nested comment starts here, maybe you want to close this instead?",
                )
                .span_label(
                    self.mk_sp(
                        start + BytePos(nested_close_idx as u32),
                        start + BytePos(nested_close_idx as u32 + 2),
                    ),
                    "...and last nested comment terminates here.",
                );
        }

        err.emit();
    }

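    // RFC 3101 reserved prefixes for future use. As of Rust 2021, using an
    // unknown prefix is a hard error; in earlier editions it only triggers an
    // (allow-by-default) lint and the prefix is lexed as a regular identifier.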
    fn report_unknown_prefix(&self, start: BytePos) {
        let prefix_span = self.mk_sp(start, self.pos);
        let prefix = self.str_from_to(start, self.pos);

        let expn_data = prefix_span.ctxt().outer_expn_data();

        if expn_data.edition.at_least_rust_2021() {
            let sugg = if prefix == "rb" {
                Some(errors::UnknownPrefixSugg::UseBr(prefix_span))
            } else if expn_data.is_root() {
                if self.cursor.first() == '\''
                    && let Some(start) = self.last_lifetime
                    && self.cursor.third() != '\''
                    && let end = self.mk_sp(self.pos, self.pos + BytePos(1))
                    && !self.psess.source_map().is_multiline(start.until(end))
                {
                    // The prefix looks like a string literal that was meant to be
                    // written with double quotes.
                    Some(errors::UnknownPrefixSugg::MeantStr { start, end })
                } else {
                    Some(errors::UnknownPrefixSugg::Whitespace(prefix_span.shrink_to_hi()))
                }
            } else {
                None
            };
            self.dcx().emit_err(errors::UnknownPrefix { span: prefix_span, prefix, sugg });
        } else {
            // Before Rust 2021, only emit a migration lint.
            self.psess.buffer_lint(
                RUST_2021_PREFIXES_INCOMPATIBLE_SYNTAX,
                prefix_span,
                ast::CRATE_NODE_ID,
                BuiltinLintDiag::ReservedPrefix(prefix_span, prefix.to_string()),
            );
        }
    }

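    /// Detect guarded string literal syntax (`#"..."#` and friends).
    ///
    /// RFC 3593 reserved this syntax for future use. As of Rust 2024, using it
    /// produces an error; in earlier editions it only triggers an
    /// (allow-by-default) lint and the leading `#` is lexed as a `Pound` token.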
    fn maybe_report_guarded_str(&mut self, start: BytePos, str_before: &'src str) -> TokenKind {
        let span = self.mk_sp(start, self.pos);
        let edition2024 = span.edition().at_least_rust_2024();

        let space_pos = start + BytePos(1);
        let space_span = self.mk_sp(space_pos, space_pos);

        let mut cursor = Cursor::new(str_before);

        let (is_string, span, unterminated) = match cursor.guarded_double_quoted_string() {
            Some(rustc_lexer::GuardedStr { n_hashes, terminated, token_len }) => {
                let end = start + BytePos(token_len);
                let span = self.mk_sp(start, end);
                let str_start = start + BytePos(n_hashes);

                if edition2024 {
                    self.cursor = cursor;
                    self.pos = end;
                }

                let unterminated = if terminated { None } else { Some(str_start) };

                (true, span, unterminated)
            }
            None => {
                // We should only get here in the `##+` case.
                debug_assert_eq!(self.str_from_to(start, start + BytePos(2)), "##");

                (false, span, None)
            }
        };
        if edition2024 {
            if let Some(str_start) = unterminated {
                // Only a fatal error if the string is unterminated.
                self.dcx()
                    .struct_span_fatal(
                        self.mk_sp(str_start, self.pos),
                        "unterminated double quote string",
                    )
                    .with_code(E0765)
                    .emit()
            }

            let sugg = if span.from_expansion() {
                None
            } else {
                Some(errors::GuardedStringSugg(space_span))
            };

            let err = if is_string {
                self.dcx().emit_err(errors::ReservedString { span, sugg })
            } else {
                self.dcx().emit_err(errors::ReservedMultihash { span, sugg })
            };

            token::Literal(token::Lit {
                kind: token::Err(err),
                symbol: self.symbol_from_to(start, self.pos),
                suffix: None,
            })
        } else {
            // Before Rust 2024, only emit a migration lint.
            self.psess.buffer_lint(
                RUST_2024_GUARDED_STRING_INCOMPATIBLE_SYNTAX,
                span,
                ast::CRATE_NODE_ID,
                BuiltinLintDiag::ReservedString { is_string, suggestion: space_span },
            );

            // For backwards compatibility, roll back to after just the first `#`
            // and return the `Pound` token.
            self.pos = start + BytePos(1);
            self.cursor = Cursor::new(&str_before[1..]);
            token::Pound
        }
    }

    fn report_too_many_hashes(&self, start: BytePos, num: u32) -> ! {
        self.dcx().emit_fatal(errors::TooManyHashes { span: self.mk_sp(start, self.pos), num });
    }

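    /// Shared implementation for cooking string-like literals: checks the
    /// contents for escape errors and interns the result. `prefix_len` and
    /// `postfix_len` give the lengths of the opening and closing delimiters so
    /// the literal contents can be sliced out.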
    fn cook_common(
        &self,
        mut kind: token::LitKind,
        mode: Mode,
        start: BytePos,
        end: BytePos,
        prefix_len: u32,
        postfix_len: u32,
        unescape: fn(&str, Mode, &mut dyn FnMut(Range<usize>, Result<(), EscapeError>)),
    ) -> (token::LitKind, Symbol) {
        let content_start = start + BytePos(prefix_len);
        let content_end = end - BytePos(postfix_len);
        let lit_content = self.str_from_to(content_start, content_end);
        unescape(lit_content, mode, &mut |range, result| {
            // Here we only check for errors. The actual unescaping is done later.
            if let Err(err) = result {
                let span_with_quotes = self.mk_sp(start, end);
                let (start, end) = (range.start as u32, range.end as u32);
                let lo = content_start + BytePos(start);
                let hi = lo + BytePos(end - start);
                let span = self.mk_sp(lo, hi);
                let is_fatal = err.is_fatal();
                if let Some(guar) = emit_unescape_error(
                    self.dcx(),
                    lit_content,
                    span_with_quotes,
                    span,
                    mode,
                    range,
                    err,
                ) {
                    assert!(is_fatal);
                    kind = token::Err(guar);
                }
            }
        });

        // We normally exclude the quotes from the symbol, but for errors we include
        // them because it results in clearer error messages.
        let sym = if !matches!(kind, token::Err(_)) {
            Symbol::intern(lit_content)
        } else {
            self.symbol_from_to(start, end)
        };
        (kind, sym)
    }

    fn cook_unicode(
        &self,
        kind: token::LitKind,
        mode: Mode,
        start: BytePos,
        end: BytePos,
        prefix_len: u32,
        postfix_len: u32,
    ) -> (token::LitKind, Symbol) {
        self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
            unescape::unescape_unicode(src, mode, &mut |span, result| {
                callback(span, result.map(drop))
            })
        })
    }

    fn cook_mixed(
        &self,
        kind: token::LitKind,
        mode: Mode,
        start: BytePos,
        end: BytePos,
        prefix_len: u32,
        postfix_len: u32,
    ) -> (token::LitKind, Symbol) {
        self.cook_common(kind, mode, start, end, prefix_len, postfix_len, |src, mode, callback| {
            unescape::unescape_mixed(src, mode, &mut |span, result| {
                callback(span, result.map(drop))
            })
        })
    }
}

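/// Normalizes an identifier to NFC and interns the result. Strings that are
/// already in NFC (the common case) are interned without extra allocation.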
pub fn nfc_normalize(string: &str) -> Symbol {
    use unicode_normalization::{IsNormalized, UnicodeNormalization, is_nfc_quick};
    match is_nfc_quick(string.chars()) {
        IsNormalized::Yes => Symbol::intern(string),
        _ => {
            let normalized_str: String = string.chars().nfc().collect();
            Symbol::intern(&normalized_str)
        }
    }
}