//! Debug-log infrastructure for the type checker: records tasks and events
//! during checking and exports them over the database's debug channel.

#![expect(dead_code)]
2
use std::{
    collections::{BTreeMap, btree_map::Entry},
    panic::Location,
    rc::Rc,
    sync::{Mutex, mpsc::Sender},
};

use dada_ir_ast::{DebugEvent, DebugEventPayload, span::Span};
use dada_util::fixed_depth_json;
use export::{CompilerLocation, TimeStamp};
use serde::Serialize;

use crate::ir::{generics::SymWhereClause, indices::InferVarIndex, types::SymTy};

use super::predicates::Predicate;

pub mod export;
20
/// Handle for emitting structured type-check debug events on behalf of
/// one task. When the debug channel is disabled, `log` is `None` and
/// every logging operation is a no-op.
pub struct LogHandle<'db> {
    /// Shared event buffer; `None` when debugging is disabled.
    log: Option<Rc<Mutex<Log<'db>>>>,
    /// The task that events pushed through this handle are attributed to.
    task_index: TaskIndex,
}
25
26impl<'db> LogHandle<'db> {
27    pub fn root(
28        db: &'db dyn crate::Db,
29        compiler_location: &'static Location<'static>,
30        root: RootTaskDescription<'db>,
31    ) -> Self {
32        if let Some(debug_tx) = db.debug_tx() {
33            LogHandle {
34                log: Some(Rc::new(Mutex::new(Log::new(
35                    db,
36                    compiler_location,
37                    root,
38                    debug_tx,
39                )))),
40                task_index: TaskIndex::root(),
41            }
42        } else {
43            LogHandle {
44                log: None,
45                task_index: TaskIndex::root(),
46            }
47        }
48    }
49
50    const DISABLED: Self = LogHandle {
51        log: None,
52        task_index: TaskIndex::root(),
53    };
54
55    pub fn spawn(
56        &self,
57        compiler_location: &'static Location<'static>,
58        task_description: TaskDescription<'db>,
59    ) -> Self {
60        let Some(log) = &self.log else {
61            return Self::DISABLED;
62        };
63
64        let mut locked_log = log.lock().unwrap();
65        let spawned_task_index = locked_log.next_task_index();
66        let event_index = locked_log.next_event_index();
67        locked_log.push_task(Task {
68            task_description,
69            started_at: event_index,
70        });
71        locked_log.push_event(Event {
72            task: self.task_index,
73            compiler_location,
74            kind: EventKind::Spawned(spawned_task_index),
75        });
76        locked_log.push_event(Event {
77            task: spawned_task_index,
78            compiler_location,
79            kind: EventKind::TaskStart,
80        });
81        std::mem::drop(locked_log);
82
83        LogHandle {
84            log: Some(log.clone()),
85            task_index: spawned_task_index,
86        }
87    }
88
89    /// Duplicate this log handle. We assert that it is the root handle.
90    /// This is because there is no *good* reason to duplicate any other handle;
91    /// when new tasks are created you should use the `spawn` or other such methods
92    /// to access them.
93    pub fn duplicate_root_handle(&self) -> Self {
94        assert_eq!(self.task_index, TaskIndex::root());
95        Self {
96            log: self.log.clone(),
97            task_index: self.task_index,
98        }
99    }
100
101    /// Push an "indenting" log, which causes subsequent log messages to be indented
102    /// until `undent` is called.
103    pub fn indent(
104        &self,
105        compiler_location: &'static Location<'static>,
106        message: &'static str,
107        values: &[&dyn erased_serde::Serialize],
108    ) {
109        self.push_event(compiler_location, message, values, |message, json_value| {
110            EventKind::Indent {
111                message,
112                json_value,
113            }
114        })
115    }
116
117    /// Remove one layer of indent
118    pub fn undent(&self, compiler_location: &'static Location<'static>, message: &'static str) {
119        self.push_event(compiler_location, message, &[], |message, _| {
120            EventKind::Undent { message }
121        })
122    }
123
124    /// Log a message with argument(s).
125    pub fn log(
126        &self,
127        compiler_location: &'static Location<'static>,
128        message: &'static str,
129        values: &[&dyn erased_serde::Serialize],
130    ) {
131        self.push_event(compiler_location, message, values, |message, json_value| {
132            EventKind::Log {
133                message,
134                json_value,
135            }
136        })
137    }
138
139    /// Log a message with argument(s).
140    pub fn infer(
141        &self,
142        compiler_location: &'static Location<'static>,
143        message: &'static str,
144        infer: InferVarIndex,
145        values: &[&dyn erased_serde::Serialize],
146    ) {
147        self.push_event(compiler_location, message, values, |message, json_value| {
148            EventKind::Infer {
149                infer,
150                message,
151                json_value,
152            }
153        })
154    }
155
156    fn push_event(
157        &self,
158        compiler_location: &'static Location<'static>,
159        message: &'static str,
160        values: &[&dyn erased_serde::Serialize],
161        kind: impl FnOnce(&'static str, String) -> EventKind,
162    ) {
163        let Some(log) = &self.log else {
164            return;
165        };
166
167        let mut log = log.lock().unwrap();
168        assert!(
169            self.task_index.0 < log.tasks.len(),
170            "task index {} is out of bounds",
171            self.task_index.0
172        );
173
174        let argument = event_argument(values);
175
176        log.push_event(Event {
177            compiler_location,
178            task: self.task_index,
179            kind: kind(message, argument),
180        });
181    }
182
183    pub fn dump(&self, span: Span<'db>) {
184        let Some(log) = &self.log else {
185            return;
186        };
187
188        let log = log.lock().unwrap();
189        log.dump(span);
190    }
191}
192
/// Accumulated debug log for one type-check session: the spawned tasks,
/// the flat event stream, and the channel the exported log is sent over.
pub struct Log<'db> {
    db: &'db dyn crate::Db,
    /// All tasks; index 0 is always the root task (see `Log::new`).
    tasks: Vec<Task<'db>>,
    /// Flat event stream; index 0 is always the `Root` event.
    events: Vec<Event>,
    /// Channel over which `dump` sends the exported log.
    debug_tx: Sender<DebugEvent>,
}
199
200impl<'db> Log<'db> {
201    fn new(
202        db: &'db dyn crate::Db,
203        compiler_location: &'static Location<'static>,
204        root: RootTaskDescription<'db>,
205        debug_tx: Sender<DebugEvent>,
206    ) -> Self {
207        let tasks = vec![Task {
208            task_description: TaskDescription::Root(root),
209            started_at: EventIndex(0),
210        }];
211
212        let events = vec![Event {
213            task: TaskIndex::root(),
214            compiler_location,
215            kind: EventKind::Root,
216        }];
217
218        Self {
219            db,
220            tasks,
221            events,
222            debug_tx,
223        }
224    }
225
226    fn next_task_index(&self) -> TaskIndex {
227        TaskIndex(self.tasks.len())
228    }
229
230    fn next_event_index(&self) -> EventIndex {
231        EventIndex(self.events.len())
232    }
233
234    fn push_task(&mut self, task: Task<'db>) {
235        self.tasks.push(task);
236    }
237
238    fn push_event(&mut self, event: Event) {
239        self.events.push(event);
240    }
241
242    fn dump(&self, span: Span<'db>) {
243        let export = self.export();
244        let absolute_span = span.absolute_span(self.db);
245
246        self.debug_tx
247            .send(DebugEvent {
248                url: absolute_span.source_file.url(self.db).clone(),
249                start: absolute_span.start,
250                end: absolute_span.end,
251                payload: DebugEventPayload::CheckLog(serde_json::to_value(export).unwrap()),
252            })
253            .unwrap();
254    }
255
256    fn export(&self) -> export::Log<'_> {
257        // First: assemble the flat list of events, which is relatively straightforward.
258        let events_flat: Vec<export::Event<'_>> = self
259            .events
260            .iter()
261            .map(|event| export::Event {
262                compiler_location: CompilerLocation::from(event.compiler_location),
263                task: export::TaskId {
264                    index: event.task.0,
265                },
266                kind: match &event.kind {
267                    EventKind::Root => "root",
268                    EventKind::Spawned(..) => "spawned",
269                    EventKind::TaskStart => "task_start",
270                    EventKind::Indent { message, .. } => message,
271                    EventKind::Undent { .. } => "end",
272                    EventKind::Log { message, .. } => message,
273                    EventKind::Infer { message, .. } => message,
274                },
275                value: match &event.kind {
276                    EventKind::Root => "null".into(),
277                    EventKind::TaskStart => "null".into(),
278                    EventKind::Spawned(task_index) => {
279                        event_argument(&[&self.tasks[task_index.0].task_description]).into()
280                    }
281                    EventKind::Indent {
282                        message: _,
283                        json_value,
284                    } => json_value.into(),
285                    EventKind::Undent { message: _ } => "null".into(),
286                    EventKind::Log {
287                        message: _,
288                        json_value,
289                    } => json_value.into(),
290                    EventKind::Infer { json_value, .. } => json_value.into(),
291                },
292                spawns: match &event.kind {
293                    EventKind::Root => None,
294                    EventKind::TaskStart => None,
295                    EventKind::Spawned(task_index) => Some(export::TaskId {
296                        index: task_index.0,
297                    }),
298                    EventKind::Indent { .. } => None,
299                    EventKind::Undent { .. } => None,
300                    EventKind::Log { .. } => None,
301                    EventKind::Infer { .. } => None,
302                },
303                infer: match &event.kind {
304                    EventKind::Root
305                    | EventKind::TaskStart
306                    | EventKind::Spawned(..)
307                    | EventKind::Indent { .. }
308                    | EventKind::Undent { .. }
309                    | EventKind::Log { .. } => None,
310                    EventKind::Infer { infer, .. } => Some(*infer),
311                },
312            })
313            .collect();
314
315        // Next: assemble the list of events by task.
316        let mut events_by_task: Vec<Vec<usize>> = (0..self.tasks.len()).map(|_| vec![]).collect();
317        for (event, index) in self.events.iter().zip(0..) {
318            events_by_task[event.task.0].push(index);
319        }
320
321        // Next: assemble the nested events.
322        let root_task = TaskIndex::root();
323        let nested_event = self.export_nested_event_for_task(root_task, &events_by_task);
324
325        // Assemble inference events
326        let infers = self.export_infers();
327
328        // Assemble tasks
329        let tasks = self
330            .tasks
331            .iter()
332            .zip(0..)
333            .map(|(task, index)| self.export_task(task, index))
334            .collect();
335
336        // Create the root event info
337        let root_event = &self.events[0]; // The first event is the root event
338        let root_task = &self.tasks[0]; // The first task is the root task
339
340        let root_event_info = export::RootEventInfo {
341            compiler_location: CompilerLocation::from(root_event.compiler_location),
342            description: event_argument(&[&root_task.task_description]),
343        };
344
345        export::Log {
346            events_flat,
347            nested_event,
348            tasks,
349            infers,
350            // New fields
351            root_event_info,
352            total_events: self.events.len(),
353        }
354    }
355
356    fn export_task(&self, task: &Task<'db>, task_index: usize) -> export::Task {
357        export::Task {
358            spawned_at: export::TimeStamp {
359                index: task.started_at.0,
360            },
361            description: event_argument(&[&task.task_description]),
362            events: self
363                .events
364                .iter()
365                .zip(0..)
366                .filter(|(event, _)| event.task.0 == task_index)
367                .map(|(_, index)| TimeStamp { index })
368                .collect(),
369        }
370    }
371
372    fn export_infers(&self) -> Vec<export::Infer> {
373        let mut events_by_infer_var: BTreeMap<InferVarIndex, export::Infer> = Default::default();
374
375        for (event, index) in self.events.iter().zip(0..) {
376            if let EventKind::Infer { infer, .. } = &event.kind {
377                match events_by_infer_var.entry(*infer) {
378                    Entry::Vacant(e) => {
379                        e.insert(export::Infer {
380                            created_at: TimeStamp { index },
381                            events: vec![],
382                        });
383                    }
384                    Entry::Occupied(mut e) => {
385                        e.get_mut().events.push(TimeStamp { index });
386                    }
387                }
388            }
389        }
390
391        events_by_infer_var.into_values().collect()
392    }
393
394    fn export_nested_event_for_task(
395        &self,
396        task: TaskIndex,
397        events_by_task: &[Vec<usize>],
398    ) -> export::NestedEvent {
399        let Some((event_first, mut events_rest)) = events_by_task[task.0].split_first() else {
400            panic!("no root event")
401        };
402
403        export::NestedEvent {
404            timestamp: export::TimeStamp {
405                index: *event_first,
406            },
407            children: self.export_child_nested_events(&mut events_rest, events_by_task),
408        }
409    }
410
411    fn export_child_nested_events(
412        &self,
413        task_events: &mut &[usize],
414        events_by_task: &[Vec<usize>],
415    ) -> Vec<export::NestedEvent> {
416        let mut output = vec![];
417
418        loop {
419            let Some((event_first, events_rest)) = task_events.split_first() else {
420                return output;
421            };
422            *task_events = events_rest;
423            let event_kind = &self.events[*event_first];
424            match &event_kind.kind {
425                EventKind::Undent { .. } => {
426                    return output;
427                }
428                EventKind::Spawned(spawned_task) => {
429                    output.push(export::NestedEvent {
430                        timestamp: export::TimeStamp {
431                            index: *event_first,
432                        },
433                        children: vec![
434                            self.export_nested_event_for_task(*spawned_task, events_by_task),
435                        ],
436                    });
437                }
438                EventKind::Indent { .. } => {
439                    output.push(export::NestedEvent {
440                        timestamp: export::TimeStamp {
441                            index: *event_first,
442                        },
443                        children: self.export_child_nested_events(task_events, events_by_task),
444                    });
445                }
446                EventKind::Infer { .. }
447                | EventKind::Root
448                | EventKind::Log { .. }
449                | EventKind::TaskStart => {
450                    output.push(export::NestedEvent {
451                        timestamp: export::TimeStamp {
452                            index: *event_first,
453                        },
454                        children: Default::default(),
455                    });
456                }
457            }
458        }
459    }
460}
461
462pub fn event_argument(values: &[&dyn erased_serde::Serialize]) -> String {
463    // FIXME: rewrite `fixed_depth_json` to not create a value
464
465    let value = if values.is_empty() {
466        serde_json::Value::Null
467    } else if values.len() == 1 {
468        fixed_depth_json::to_json_value_max_depth(values[0], 22)
469    } else {
470        fixed_depth_json::to_json_value_max_depth(&values, 22)
471    };
472
473    serde_json::to_string(&value).unwrap()
474}
475
/// Identifies a task by its position in `Log::tasks`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct TaskIndex(usize);

impl TaskIndex {
    /// The root task always occupies index 0 (see `Log::new`).
    pub const fn root() -> Self {
        TaskIndex(0)
    }
}
484
/// A logical task spawned during type checking.
pub struct Task<'db> {
    /// Why this task was spawned.
    pub task_description: TaskDescription<'db>,
    /// Index of the event that started this task: the `Root` event for the
    /// root task, or the parent's `Spawned` event otherwise.
    pub started_at: EventIndex,
}
489
/// Identifies an event by its position in `Log::events`.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
pub struct EventIndex(usize);
492
/// A single entry in the debug event stream.
pub struct Event {
    /// The task this event is attributed to.
    pub task: TaskIndex,
    /// Source location in the compiler that emitted the event.
    pub compiler_location: &'static Location<'static>,
    pub kind: EventKind,
}
498
/// Payload of an [`Event`]. The `json_value` fields hold arguments already
/// JSON-encoded by `event_argument`.
pub enum EventKind {
    /// Root event of type checking
    Root,

    /// Start event for a task spawned during type checking
    TaskStart,

    /// Current task spawned the child with the given index
    Spawned(TaskIndex),

    /// Display hint: indent further logs until `Undent` encountered
    Indent {
        message: &'static str,
        json_value: String,
    },

    /// End indenting
    Undent { message: &'static str },

    /// Add a log item with the given header + (JSON-encoded) argument
    Log {
        message: &'static str,
        json_value: String,
    },

    /// A log message about an inference variable being created or modified
    Infer {
        message: &'static str,
        infer: InferVarIndex,
        json_value: String,
    },
}
531
/// Description of the root task, supplied by the caller of `LogHandle::root`.
#[derive(Serialize)]
pub struct RootTaskDescription<'db> {
    /// Span of the item being type-checked.
    pub span: Span<'db>,
    /// Optional headline message.
    pub message: Option<&'static str>,
    // NOTE(review): presumably JSON-encoded arguments, matching the
    // `String` produced by `event_argument` — confirm with callers.
    pub values: Option<String>,
}
538
/// Describes why a task was spawned during type checking; serialized into
/// the exported log as the task's description.
#[derive(Serialize)]
pub enum TaskDescription<'db> {
    /// The root task (always task 0; see `Log::new`).
    Root(RootTaskDescription<'db>),
    Require(usize),
    Join(usize),
    All(usize),
    Any(usize),
    IfRequired,
    IfNotRequired,
    RequireAssignableType(SymTy<'db>, SymTy<'db>),
    RequireEqualTypes(SymTy<'db>, SymTy<'db>),
    RequireMyNumericType(SymTy<'db>),
    RequireNumericType(SymTy<'db>),
    RequireFutureType(SymTy<'db>),
    RequireBoundsProvablyPredicate(InferVarIndex, Predicate),
    RequireBoundsNotProvablyPredicate(InferVarIndex, Predicate),
    RequireWhereClause(SymWhereClause<'db>),
    RequireLowerChain,
    IfNotNever,
    Misc,
    CheckArg(usize),
    ReconcileTyBounds(InferVarIndex),
    RelateInferBounds,
}
563
/// Debug record for an inference variable; currently stores only its
/// creation span. Unreferenced in this file (covered by the module's
/// `#![expect(dead_code)]`).
pub struct InferenceVariable<'db> {
    span: Span<'db>,
}