Skip to main content

dada_parser/
lib.rs

1#![doc = include_str!("../docs/overview.md")]
2
3use salsa::Update;
4use tokenizer::{
5    Delimiter, Keyword, Skipped, Token, TokenKind, is_op_char,
6    operator::{self, Op},
7    tokenize,
8};
9
10use dada_ir_ast::{
11    ast::{AstModule, DeferredParse, SpanVec, SpannedIdentifier},
12    diagnostic::{Diagnostic, Level, Reported},
13    inputs::SourceFile,
14    span::{Anchor, Offset, Span, Spanned},
15};
16
17use dada_ir_ast::Db;
18
19mod classes;
20mod expr;
21mod functions;
22mod generics;
23mod miscellaneous;
24mod module_body;
25pub mod prelude;
26mod square_bracket_args;
27mod tokenizer;
28mod types;
29
#[salsa::tracked]
impl prelude::SourceFileParse for SourceFile {
    /// Parse this source file into an [`AstModule`].
    ///
    /// Any diagnostics produced while parsing (including an error for
    /// unreadable file contents) are reported to `db` as a side effect;
    /// the returned module is whatever could be recovered.
    #[salsa::tracked]
    fn parse(self, db: &dyn crate::Db) -> AstModule<'_> {
        let anchor = Anchor::SourceFile(self);
        // If the file's contents could not be loaded, report that as a
        // diagnostic but keep going: `contents_if_ok` below yields the
        // best-effort text (empty on failure — TODO confirm against
        // `SourceFile::contents_if_ok`).
        if let Err(message) = self.contents(db) {
            Diagnostic::new(db, Level::Error, self.span(db), message).report(db);
        }
        let text = self.contents_if_ok(db);
        let tokens = tokenizer::tokenize(db, anchor, Offset::ZERO, text);
        let mut parser = Parser::new(db, anchor, &tokens);
        // Module parsing always succeeds (possibly with recovery); failures
        // surface as diagnostics accumulated in the parser instead.
        let module = AstModule::eat(db, &mut parser).expect("parsing a module is infallible");
        // Report every diagnostic the parser accumulated during recovery.
        parser.into_diagnostics().into_iter().for_each(|d| {
            let Reported(_) = d.report(db);
        });
        module
    }
}
48
/// Cursor over a token stream produced by the [`tokenizer`].
///
/// The parser tracks its position in the token slice, the span of the most
/// recently consumed token, and a buffer of diagnostics so that speculative
/// parsing (see [`Parser::fork`]) can accumulate errors without reporting
/// them prematurely.
struct Parser<'token, 'db> {
    db: &'db dyn crate::Db,

    /// Input tokens
    tokens: &'token [Token<'token, 'db>],

    /// Next token (if any) in the token list
    next_token: usize,

    /// Span of the last consumed token; starts as the span of the anchor
    last_span: Span<'db>,

    /// Additional diagnostics that were reported by parsers.
    /// Used when we are able to partially parse something and recover.
    /// These need to be reported to the user eventually.
    /// They are stored in the parser to support speculative parsing.
    diagnostics: Vec<Diagnostic>,
}
67
68impl<'token, 'db> Parser<'token, 'db> {
69    pub fn new(
70        db: &'db dyn crate::Db,
71        anchor: Anchor<'db>,
72        tokens: &'token [Token<'token, 'db>],
73    ) -> Self {
74        let mut this = Self {
75            db,
76            tokens,
77            next_token: 0,
78            last_span: Span {
79                anchor,
80                start: Offset::ZERO,
81                end: Offset::ZERO,
82            },
83            diagnostics: Vec::new(),
84        };
85
86        this.eat_errors();
87
88        this
89    }
90
91    /// Parse the contents of `deferred_parse` using `op` in the context of the given `anchor`.
92    /// See [`DeferredParse`][] for an explanation of when/why we use deferred parsing.
93    #[track_caller]
94    pub fn deferred<T>(
95        db: &'db dyn crate::Db,
96        anchor: impl Into<Anchor<'db>>,
97        deferred_parse: &'db DeferredParse<'db>,
98        op: impl FnOnce(Parser<'_, 'db>) -> T,
99    ) -> T {
100        let anchor = anchor.into();
101
102        let input_offset = {
103            // To get the spans right, we sometimes need to apply an offset.
104            // This is a bit subtle. There are two cases to consider.
105            if deferred_parse.span.anchor == anchor {
106                // CASE 1: the easy case.
107                //
108                // In this case, the deferred parse span has the same
109                // anchor as the new contents. This occurs for square bracket
110                // args, e.g.,
111                //
112                // ```notrust
113                // fn foo() { bar[String]() }
114                //                ------ deferred parse of this
115                // ```
116                //
117                // In this case, the offsets don't need to be adjusted,
118                // as the deferred parse is relative to the `fn foo`
119                // and the new contents are as well.
120                Offset::ZERO
121            } else if let anchor_span = anchor.span(db)
122                && let grandanchor = anchor_span.anchor
123                && deferred_parse.span.anchor == grandanchor
124            {
125                // CASE 2:
126                //
127                // In case 2, the deferred parse is anchored in the
128                // same context as as the item which will become
129                // the new anchor. This occurs for example with a class:
130                //
131                // ```notrust
132                // mod { class Foo { ... } }
133                //     ^           ------- span of the deferred parse
134                //     | ----------------- span of the class (will be the new anchor)
135                //     | ---------- offset we will apply below
136                //     start of the anchor span
137                // ```
138                //
139                // Here, `anchor` will be the class, and its span will be anchored
140                // to the module (we call that the `grandanchor`).
141                //
142                // The deferred parse is *also* anchored to the module, but we
143                // want to parse its contents to produce contents whose offsets
144                // are relative to the start of the *class*. Therefore we
145                // apply an offset.
146                deferred_parse.span.start - anchor_span.start
147            } else {
148                panic!(
149                    "Deferred-parse of `{contents:?}` had anchor (a) which had no known relationship to new anchor (b)\
150                    \n  (a) = {a:#?}\
151                    \n  (b) = {b:#?}",
152                    contents = deferred_parse.contents,
153                    a = deferred_parse.span.anchor,
154                    b = anchor,
155                )
156            }
157        };
158
159        // Tokenize the contents of the deferred parse using `anchor`
160        let tokens = tokenize(db, anchor, input_offset, &deferred_parse.contents);
161
162        op(Parser::new(db, anchor, &tokens))
163    }
164
165    /// Top-level parsing function: parses zero or more instances of T and reports any errors.
166    pub fn parse_many_and_report_diagnostics<T>(
167        mut self,
168        db: &'db dyn crate::Db,
169    ) -> SpanVec<'db, T::Output>
170    where
171        T: Parse<'db>,
172    {
173        let start_span = self.peek_span();
174
175        let result = match T::eat_many(db, &mut self) {
176            Ok(v) => v,
177            Err(err) => {
178                self.push_diagnostic(err.into_diagnostic(db));
179                SpanVec {
180                    span: start_span.to(db, self.last_span()),
181                    values: vec![],
182                }
183            }
184        };
185
186        for diagnostic in self.into_diagnostics() {
187            diagnostic.report(db);
188        }
189
190        result
191    }
192
    /// Record a diagnostic, indicating that parsing recovered from an error.
    /// The diagnostic is buffered (not reported) so speculative parses can
    /// be discarded without user-visible output.
    pub fn push_diagnostic(&mut self, diagnostic: Diagnostic) {
        self.diagnostics.push(diagnostic);
    }

    /// Take all diagnostics from another parser (e.g., one parsing a delimited set of tokens)
    /// and append them to this parser's buffer. Consumes `parser`, which also
    /// flags any tokens it left unconsumed (see [`Parser::into_diagnostics`]).
    pub fn take_diagnostics(&mut self, parser: Parser<'_, 'db>) {
        self.diagnostics.extend(parser.into_diagnostics());
    }
202
203    /// Complete parsing and convert the parser into the resulting diagnostics (errors).
204    ///
205    /// Reports an error if there are any unconsumed tokens.
206    pub fn into_diagnostics(mut self) -> Vec<Diagnostic> {
207        if self.peek().is_some() {
208            let diagnostic = self.illformed(Expected::EOF).into_diagnostic(self.db);
209            self.push_diagnostic(diagnostic);
210
211            // consume all remaining tokens lest there is a tokenizer error in there
212            while self.peek().is_some() {
213                self.eat_next_token().unwrap();
214            }
215        }
216
217        self.diagnostics
218    }
219
220    /// Forks this parser into a split parser at the same point
221    /// with a fresh set of diagnostics. Used for speculation.
222    fn fork(&self) -> Self {
223        Self {
224            db: self.db,
225            tokens: self.tokens,
226            next_token: self.next_token,
227            last_span: self.last_span,
228            diagnostics: Vec::new(),
229        }
230    }
231
232    /// Eats any pending error tokens and adds them to the diagnostic list.
233    /// Does not adjust `last_span`.
234    ///
235    /// This implements **eager error consumption** - error tokens from the tokenizer
236    /// are immediately converted to diagnostics rather than disrupting normal parsing.
237    /// Called automatically after each `eat_next_token()` to maintain clean token streams.
238    ///
239    /// This pattern allows parsing to continue after tokenizer errors, enabling
240    /// better error recovery and multiple error reporting in a single pass.
241    fn eat_errors(&mut self) {
242        while let Some(Token {
243            kind: TokenKind::Error(diagnostic),
244            ..
245        }) = self.tokens.get(self.next_token)
246        {
247            self.push_diagnostic(diagnostic.clone());
248            self.next_token += 1;
249        }
250    }
251
252    /// Advance by one token, returning `Err` if there is no current token.
253    /// After advancing, also eagerly eats any error tokens.
254    pub fn eat_next_token(&mut self) -> Result<(), ParseFail<'db>> {
255        if self.next_token < self.tokens.len() {
256            assert!(self.next_token < self.tokens.len());
257            let span = self.tokens[self.next_token].span;
258            assert_eq!(span.anchor, self.last_span.anchor);
259            self.last_span = span;
260            self.next_token += 1;
261            self.eat_errors();
262            Ok(())
263        } else {
264            Err(self.illformed(Expected::MoreTokens))
265        }
266    }
267
268    /// Peek at the next token, returning None if there is none.
269    /// Implicitly advances past error tokens.
270    /// Does not consume the token returned.
271    pub fn peek(&mut self) -> Option<&Token<'token, 'db>> {
272        let token = self.tokens.get(self.next_token)?;
273
274        assert!(!matches!(
275            token,
276            Token {
277                kind: TokenKind::Error(_),
278                ..
279            },
280        ));
281
282        Some(token)
283    }
284
    /// Span of the last consumed token (or the empty span at the start of
    /// the anchor, if nothing has been consumed yet).
    pub fn last_span(&self) -> Span<'db> {
        self.last_span
    }
289
290    /// Span of the next token in the input (or the end of the final token, if there are no more tokens)
291    pub fn peek_span(&mut self) -> Span<'db> {
292        let s = match self.peek() {
293            Some(token) => token.span,
294            None => self.last_span.at_end(),
295        };
296        assert_eq!(s.anchor, self.last_span.anchor);
297        s
298    }
299
300    /// Create a parse error because the next token is not what we expected.
301    pub fn illformed(&mut self, expected: Expected) -> ParseFail<'db> {
302        if let Expected::EOF = expected {
303            return ParseFail::Expected(self.peek_span(), expected);
304        }
305
306        // The way we prefer to report this is by finding the *previous* token
307        // and reported that we expected it to be followed by something.
308        let mut previous_token = self.next_token;
309        while previous_token != 0 {
310            previous_token -= 1;
311            if let TokenKind::Error(_) = self.tokens[previous_token].kind {
312                continue;
313            }
314
315            return ParseFail::ExpectedTokenToBeFollowedBy(
316                self.tokens[previous_token].span,
317                self.peek_span(),
318                expected,
319            );
320        }
321
322        // Could not find a suitable previous token. Oh well.
323        ParseFail::Expected(self.peek_span(), expected)
324    }
325
326    pub fn eat_keyword(&mut self, kw: Keyword) -> Result<Span<'db>, ParseFail<'db>> {
327        if let Some(&Token {
328            kind: TokenKind::Keyword(kw1),
329            skipped: _,
330            span,
331        }) = self.peek()
332            && kw == kw1
333        {
334            self.eat_next_token().unwrap();
335            return Ok(span);
336        }
337        Err(self.illformed(Expected::Keyword(kw)))
338    }
339
340    pub fn eat_id(&mut self) -> Result<SpannedIdentifier<'db>, ParseFail<'db>> {
341        if let Some(&Token {
342            kind: TokenKind::Identifier(id),
343            span,
344            skipped: _,
345        }) = self.peek()
346        {
347            self.eat_next_token().unwrap();
348            return Ok(SpannedIdentifier { span, id });
349        }
350        Err(self.illformed(Expected::Identifier))
351    }
352
353    pub fn eat_op(&mut self, op: Op) -> Result<Span<'db>, ParseFail<'db>> {
354        const MAX_LEN: usize = 5;
355        assert!(op.len() < MAX_LEN, "unexpectedly long operator");
356
357        if cfg!(debug_assertions)
358            && let Some(invalid_ch) = op.iter().find(|&&ch| !is_op_char(ch))
359        {
360            debug_assert!(
361                false,
362                "eat_op({op:?}): `{invalid_ch:?}` is not a valid operator"
363            );
364        }
365
366        // Check that next character is an operator character.
367        let Some(&Token {
368            kind: TokenKind::OpChar(ch0),
369            span: start_span,
370            skipped: _,
371        }) = self.peek()
372        else {
373            return Err(self.illformed(Expected::Operator(op)));
374        };
375
376        // Now look for subsequent operator tokens.
377        // Accumulate them into the buffer so long as we are not skipping any whitespace or encountering errors.
378        let mut buffer: [char; MAX_LEN] = [' '; MAX_LEN];
379        buffer[0] = ch0;
380        let mut buffer_len = 1;
381        while let Some(&Token {
382            kind: TokenKind::OpChar(ch1),
383            skipped: None,
384            ..
385        }) = self.tokens.get(self.next_token + buffer_len)
386        {
387            buffer[buffer_len] = ch1;
388            buffer_len += 1;
389        }
390
391        if op.len() != buffer_len {
392            return Err(self.illformed(Expected::Operator(op)));
393        }
394
395        for i in 0..buffer_len {
396            if op[i] != buffer[i] {
397                return Err(self.illformed(Expected::Operator(op)));
398            }
399        }
400
401        for _ in 0..buffer_len {
402            self.eat_next_token().unwrap();
403        }
404        Ok(start_span.to(self.db, self.last_span()))
405    }
406
407    /// Returns a deferred parse of the next delimited token.
408    /// If this returns `Err`, then nothing has been consumed.
409    pub fn defer_delimited(
410        &mut self,
411        delimiter: Delimiter,
412    ) -> Result<DeferredParse<'db>, ParseFail<'db>> {
413        let text = self.eat_delimited(delimiter)?;
414        Ok(DeferredParse {
415            span: self.last_span(),
416            contents: text.to_string(),
417        })
418    }
419
420    /// Eats the next token if it is a delimited token with the given delimiter;
421    /// returns a `&str` slice of the token's contents.
422    pub fn eat_delimited(&mut self, delimiter: Delimiter) -> Result<&'token str, ParseFail<'db>> {
423        if let Some(&Token {
424            kind:
425                TokenKind::Delimited {
426                    delimiter: delimiter1,
427                    text,
428                },
429            span: _,
430            skipped: _,
431        }) = self.peek()
432            && delimiter == delimiter1
433        {
434            self.eat_next_token().unwrap();
435            return Ok(text);
436        }
437
438        Err(self.illformed(Expected::Delimited(delimiter)))
439    }
440
441    /// Returns true if the next token is on the same line
442    /// as the most recently consumed token.
443    /// Some parts of our grammar are newline sensitive.
444    fn next_token_on_same_line(&mut self) -> bool {
445        match self.peek() {
446            Some(Token { skipped, .. }) => match skipped {
447                Some(skipped) => *skipped <= Skipped::Whitespace,
448                None => true,
449            },
450            None => false,
451        }
452    }
453}
454
455/// Parse an instance of `Self` from the given [`Parser`][].
456///
457/// There are several parsing methods depending on how many instances of `Self` you wish to parse:
458///
459/// * [`opt_parse`](Parse::opt_parse) -- 0 or 1 instance (`x?` in a regex)
460/// * [`opt_parse_comma`](Parse::opt_parse) -- comma-separated list, returns `None` if no elements found
461/// * [`opt_parse_delimited`](Parse::opt_parse_delimited) -- delimited comma-separated list, `None` if no delimiters are found
462/// * [`eat`](Parse::eat) -- exactly 1 instance (`x` in a regex`)
463/// * [`eat_comma`](Parse::eat_comma) -- comma-separated list, returns an empty list if no elements found
464/// * [`eat_delimited`](Parse::eat_delimited) -- delimited comma-separated list where delimiters are mandatory
465///
466/// Implementors need only provide `opt_parse`, the rest are automatically provided.
467///
468/// # Return values
469///
470/// The `opt_parse` methods return an `Result<Option<_>, ParseFail<'db>>` as follows:
471///
472/// * `Ok(Some(v))` -- parse succeeded (possibly with recovery,
473///   in which case diagnostics will be stored into the [`Parser`][]).
474/// * `Ok(None)` -- no instance of `Self` was found.
475/// * `Err(err)` -- a malformed instance of `Self` was found. Some tokens were consumed.
476///
477/// The `eat` methods return a `Result<_, ParseFail<'db>>` and hence only distinguish success and error.
478///
479/// # Diagnostics
480///
481/// Parsing something **can never** report diagnostics to the user.
482/// Any diagnostics need to be accumulated in the [`Parser`][].
483#[allow(dead_code)] // some fns not currently used
484trait Parse<'db>: Sized {
485    type Output: Update;
486
487    /// Speculatively parses to see if we could eat a `Self`
488    /// without any error. Never consumes tokens nor produces an error.
489    fn can_eat(db: &'db dyn crate::Db, parser: &Parser<'_, 'db>) -> bool {
490        let mut parser = parser.fork();
491        match Self::eat(db, &mut parser) {
492            Ok(_) => parser.diagnostics.is_empty(),
493            Err(_) => false,
494        }
495    }
496
497    /// Parses an instance of `Self` from the given [`Parser`][], reporting an error if no instance is found.
498    fn eat(
499        db: &'db dyn crate::Db,
500        parser: &mut Parser<'_, 'db>,
501    ) -> Result<Self::Output, ParseFail<'db>> {
502        match Self::opt_parse(db, parser)? {
503            Some(v) => Ok(v),
504            None => Err(parser.illformed(Self::expected())),
505        }
506    }
507
508    /// Parse zero-or-more comma-separated instances of `Self` returning a vector.
509    /// Accepts trailing commas.
510    fn eat_comma(
511        db: &'db dyn crate::Db,
512        parser: &mut Parser<'_, 'db>,
513    ) -> Result<SpanVec<'db, Self::Output>, ParseFail<'db>> {
514        match Self::opt_parse_comma(db, parser)? {
515            Some(v) => Ok(v),
516            None => Ok(SpanVec {
517                span: parser.last_span().at_end(),
518                values: vec![],
519            }),
520        }
521    }
522
523    /// Parse zero-or-more instances of `Self` returning a vector.
524    fn eat_many(
525        db: &'db dyn crate::Db,
526        parser: &mut Parser<'_, 'db>,
527    ) -> Result<SpanVec<'db, Self::Output>, ParseFail<'db>> {
528        let mut values = vec![];
529        let start_span = parser.peek_span();
530        loop {
531            match Self::opt_parse(db, parser) {
532                Ok(Some(v)) => values.push(v),
533                Ok(None) => break,
534                Err(err) if values.is_empty() => return Err(err),
535                Err(err) => {
536                    parser.push_diagnostic(err.into_diagnostic(db));
537                    break;
538                }
539            }
540        }
541
542        Ok(SpanVec {
543            span: start_span.to(db, parser.last_span()),
544            values,
545        })
546    }
547
548    /// Eat a comma separated list of Self, delimited by `delimiter`
549    /// (e.g., `(a, b, c)`).
550    fn eat_delimited<T>(
551        db: &'db dyn crate::Db,
552        parser: &mut Parser<'_, 'db>,
553        delimiter: Delimiter,
554        eat_method: impl FnOnce(&'db dyn crate::Db, &mut Parser<'_, 'db>) -> Result<T, ParseFail<'db>>,
555    ) -> Result<T, ParseFail<'db>> {
556        match Self::opt_parse_delimited(db, parser, delimiter, eat_method)? {
557            Some(v) => Ok(v),
558            None => Err(parser.illformed(Expected::Delimited(delimiter))),
559        }
560    }
561
    /// Parse a single instance of `Self`, returning `Ok(Some(v))`.
    /// Returns `Ok(None)` if `Self` was not present or `Err(err)`
    /// if `Self` appeared to be present but was ill-formed.
    ///
    /// This is the one required method; every other parsing method on this
    /// trait is a default implemented in terms of it.
    ///
    /// Invariants maintained by this method:
    ///
    /// * If `Ok(None)` is returned, consumed *NO* tokens and reported *NO* diagnostics.
    /// * If `Err` is returned, consumed at least one token (not true for `eat` methods).
    fn opt_parse(
        db: &'db dyn crate::Db,
        parser: &mut Parser<'_, 'db>,
    ) -> Result<Option<Self::Output>, ParseFail<'db>>;
574
575    /// Parse a delimited list of Self
576    /// e.g., `(a, b, c)` or `[a, b, c]`. Returns `None` if
577    /// the given delimiters indicated by `delimiter` are not found.
578    fn opt_parse_delimited<T>(
579        db: &'db dyn crate::Db,
580        parser: &mut Parser<'_, 'db>,
581        delimiter: Delimiter,
582        eat_method: impl FnOnce(&'db dyn crate::Db, &mut Parser<'_, 'db>) -> Result<T, ParseFail<'db>>,
583    ) -> Result<Option<T>, ParseFail<'db>> {
584        let Ok(text) = parser.eat_delimited(delimiter) else {
585            return Ok(None);
586        };
587
588        let text_span = parser.last_span();
589        let input_offset = text_span.start + 1; // account for the opening delimiter
590        let tokenized = tokenize(db, text_span.anchor, input_offset, text);
591        let mut parser1 = Parser::new(db, text_span.anchor, &tokenized);
592        let opt_list_err = eat_method(db, &mut parser1);
593        parser.take_diagnostics(parser1);
594        Ok(Some(opt_list_err?))
595    }
596
    /// Parse a comma separated list of Self.
    /// Convenience wrapper around [`Parse::opt_parse_separated`] with `,`.
    fn opt_parse_comma(
        db: &'db dyn crate::Db,
        parser: &mut Parser<'_, 'db>,
    ) -> Result<Option<SpanVec<'db, Self::Output>>, ParseFail<'db>> {
        Self::opt_parse_separated(db, parser, operator::COMMA)
    }
604
605    /// Parse a `separator` separated list of Self
606    fn opt_parse_separated(
607        db: &'db dyn crate::Db,
608        parser: &mut Parser<'_, 'db>,
609        separator: operator::Op,
610    ) -> Result<Option<SpanVec<'db, Self::Output>>, ParseFail<'db>> {
611        match Self::opt_parse(db, parser) {
612            Ok(Some(item)) => {
613                let mut values = vec![item];
614
615                while parser.eat_op(separator).is_ok() {
616                    match Self::opt_parse(db, parser) {
617                        Ok(Some(item)) => values.push(item),
618                        Ok(None) => break,
619                        Err(err) => {
620                            parser.push_diagnostic(err.into_diagnostic(db));
621                            break;
622                        }
623                    }
624                }
625
626                Ok(Some(SpanVec {
627                    span: parser.last_span(),
628                    values,
629                }))
630            }
631
632            Ok(None) => Ok(None),
633
634            Err(err) => Err(err),
635        }
636    }
637
638    /// If `guard_op` appears, then parse `Self`
639    fn opt_parse_guarded(
640        guard_op: impl ParseGuard,
641        db: &'db dyn crate::Db,
642        parser: &mut Parser<'_, 'db>,
643    ) -> Result<Option<Self::Output>, ParseFail<'db>> {
644        if guard_op.eat(db, parser) {
645            Ok(Some(Self::eat(db, parser)?))
646        } else {
647            Ok(None)
648        }
649    }
650
    /// What to report in an error message when this nonterminal is missing.
    fn expected() -> Expected;
652}
653
/// A token (operator or keyword) that guards an optional production:
/// if it can be eaten, the guarded production is expected to follow.
/// See [`Parse::opt_parse_guarded`].
trait ParseGuard {
    /// Try to consume `self` from `parser`; returns true (and consumes
    /// the token) on success, false (consuming nothing) otherwise.
    fn eat<'db>(self, db: &'db dyn crate::Db, parser: &mut Parser<'_, 'db>) -> bool;
}

impl ParseGuard for Op {
    fn eat<'db>(self, _db: &'db dyn crate::Db, parser: &mut Parser<'_, 'db>) -> bool {
        parser.eat_op(self).is_ok()
    }
}

impl ParseGuard for Keyword {
    fn eat<'db>(self, _db: &'db dyn crate::Db, parser: &mut Parser<'_, 'db>) -> bool {
        parser.eat_keyword(self).is_ok()
    }
}
669
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum ParseFail<'db> {
    /// Given the span of the previous token and the span of the (unsuitable) next token,
    /// report that the next token is not what we expected.
    ExpectedTokenToBeFollowedBy(Span<'db>, Span<'db>, Expected),

    /// Report that the token(s) at the given span are not what we expected.
    Expected(Span<'db>, Expected),
}

/// What the parser was looking for when it failed; rendered into
/// user-facing text by [`ParseFail::into_diagnostic`].
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
pub enum Expected {
    /// End of input (used when extra tokens remain).
    EOF,
    /// Any further input (used when input ended prematurely).
    MoreTokens,
    /// An identifier token.
    Identifier,
    /// A specific operator.
    Operator(Op),
    /// A specific keyword.
    Keyword(Keyword),
    /// A token delimited by the given delimiter (e.g., `(...)`).
    Delimited(Delimiter),
    /// A named grammar nonterminal, described in prose.
    Nonterminal(&'static str),
}
690
691impl ParseFail<'_> {
692    pub fn into_diagnostic(self, db: &dyn crate::Db) -> Diagnostic {
693        return match self {
694            ParseFail::Expected(span, Expected::EOF) => {
695                Diagnostic::error(db, span, "extra input".to_string()).label(
696                    db,
697                    Level::Error,
698                    span,
699                    "I don't know what to do with this, it appears to be extra".to_string(),
700                )
701            }
702
703            ParseFail::ExpectedTokenToBeFollowedBy(span, next_span, expected) => {
704                let message = expected_to_string(db, expected);
705                Diagnostic::error(db, span, format!("expected {message} to come next"))
706                    .label(
707                        db,
708                        Level::Error,
709                        span,
710                        format!("I expected this to be followed by {message}"),
711                    )
712                    .label(
713                        db,
714                        Level::Info,
715                        next_span,
716                        "but instead I saw this".to_string(),
717                    )
718            }
719
720            ParseFail::Expected(span, expected) => {
721                let message = expected_to_string(db, expected);
722                Diagnostic::error(db, span, format!("expected {message}")).label(
723                    db,
724                    Level::Error,
725                    span,
726                    format!("I expected to see {message}, not this"),
727                )
728            }
729        };
730
731        fn expected_to_string(_db: &dyn crate::Db, expected: Expected) -> String {
732            match expected {
733                Expected::EOF => unreachable!(), // handled specially
734                Expected::MoreTokens => "more input".to_string(),
735                Expected::Identifier => "an identifier".to_string(),
736                Expected::Operator(op) => format!("`{op}`"),
737                Expected::Keyword(k) => format!("`{k:?}`"),
738                Expected::Delimited(d) => format!("`{}`", d.open_char()),
739                Expected::Nonterminal(n) => n.to_string(),
740            }
741        }
742    }
743}