diff --git a/analysis/dune b/analysis/dune index 6b297d2e58..9b02abb4b5 100644 --- a/analysis/dune +++ b/analysis/dune @@ -1,4 +1,4 @@ -(dirs bin src reanalyze vendor) +(dirs bin src reactive reanalyze vendor) (env (dev diff --git a/analysis/reactive/Makefile b/analysis/reactive/Makefile new file mode 100644 index 0000000000..76e49736c7 --- /dev/null +++ b/analysis/reactive/Makefile @@ -0,0 +1,15 @@ +.PHONY: build test clean + +# Build the reactive library +build: + dune build src/ + +# Run all tests +test: + dune build test/ReactiveTest.exe + dune exec test/ReactiveTest.exe + +# Clean build artifacts +clean: + dune clean + diff --git a/analysis/reactive/README.md b/analysis/reactive/README.md new file mode 100644 index 0000000000..9f55e57eff --- /dev/null +++ b/analysis/reactive/README.md @@ -0,0 +1,109 @@ +# Reactive Collections Library + +A library for incremental computation using reactive collections with delta-based updates. + +## Overview + +This library provides composable reactive collections that automatically propagate changes through a computation graph. When source data changes, only the affected parts of derived collections are recomputed. + +### Key Features + +- **Delta-based updates**: Changes propagate as `Set`, `Remove`, or `Batch` deltas +- **Glitch-free semantics**: Topological scheduling ensures consistent updates +- **Composable combinators**: `flatMap`, `join`, `union`, `fixpoint` +- **Incremental fixpoint**: Efficient transitive closure with support for additions and removals + +## Usage + +```ocaml +open Reactive + +(* Create a source collection *) +let (files, emit) = source ~name:"files" () + +(* Derive collections with combinators *) +let decls = flatMap ~name:"decls" files + ~f:(fun _path data -> data.declarations) + () + +let refs = flatMap ~name:"refs" files + ~f:(fun _path data -> data.references) + ~merge:PosSet.union + () + +(* Join collections *) +let resolved = join ~name:"resolved" refs decls + ~key_of:(fun pos _ref -> pos) + ~f:(fun pos ref decl_opt -> ...) + () + +(* Compute transitive closure *) +let reachable = fixpoint ~name:"reachable" + ~init:roots + ~edges:graph + () + +(* Emit changes *) +emit (Set ("file.res", file_data)) +emit (Batch [set "a.res" data_a; set "b.res" data_b]) +``` + +## Combinators + +| Combinator | Description | +|------------|-------------| +| `source` | Create a mutable source collection | +| `flatMap` | Transform and flatten entries, with optional merge | +| `join` | Look up keys from left collection in right collection | +| `union` | Combine two collections, with optional merge for conflicts | +| `fixpoint` | Compute transitive closure incrementally | + +## Building & Testing + +```bash +# Build the library +make build + +# Run all tests +make test + +# Clean build artifacts +make clean +``` + +## Test Structure + +Tests are organized by theme: + +| File | Description | +|------|-------------| +| `FlatMapTest.ml` | FlatMap combinator tests | +| `JoinTest.ml` | Join combinator tests | +| `UnionTest.ml` | Union combinator tests | +| `FixpointBasicTest.ml` | Basic fixpoint graph traversal | +| `FixpointIncrementalTest.ml` | Incremental fixpoint updates | +| `BatchTest.ml` | Batch processing tests | +| `IntegrationTest.ml` | End-to-end file processing | +| `GlitchFreeTest.ml` | Glitch-free scheduler tests | + +## Glitch-Free Semantics + +The scheduler ensures that derived collections never see inconsistent intermediate states: + +1. **Topological levels**: Each node has a level based on its dependencies +2. 
**Accumulate phase**: All deltas at a level are collected before processing +3. **Propagate phase**: Nodes process accumulated deltas in level order + +This prevents issues like: +- Anti-joins seeing partial data (e.g., refs without matching decls) +- Multi-level unions causing spurious additions/removals + +## Usage in Reanalyze + +This library powers the reactive dead code analysis in reanalyze: + +- `ReactiveFileCollection`: Manages CMT file processing +- `ReactiveMerge`: Merges per-file data into global collections +- `ReactiveLiveness`: Computes live declarations via fixpoint +- `ReactiveSolver`: Generates dead code issues reactively + diff --git a/analysis/reactive/dune b/analysis/reactive/dune new file mode 100644 index 0000000000..2aac24f843 --- /dev/null +++ b/analysis/reactive/dune @@ -0,0 +1 @@ +(dirs src test) diff --git a/analysis/reactive/src/Reactive.ml b/analysis/reactive/src/Reactive.ml new file mode 100644 index 0000000000..71d94ad8b3 --- /dev/null +++ b/analysis/reactive/src/Reactive.ml @@ -0,0 +1,1185 @@ +(** Reactive V2: Accumulate-then-propagate scheduler for glitch-free semantics. + + Key design: + 1. Nodes accumulate batch deltas (don't process immediately) + 2. Scheduler visits nodes in dependency order + 3. Each node processes accumulated deltas exactly once per wave + + This eliminates glitches from multi-level dependencies. *) + +(** {1 Deltas} *) + +type ('k, 'v) delta = + | Set of 'k * 'v + | Remove of 'k + | Batch of ('k * 'v option) list + +let set k v = (k, Some v) +let remove k = (k, None) + +let delta_to_entries = function + | Set (k, v) -> [(k, Some v)] + | Remove k -> [(k, None)] + | Batch entries -> entries + +let merge_entries entries = + (* Deduplicate: later entries win *) + let tbl = Hashtbl.create (List.length entries) in + List.iter (fun (k, v) -> Hashtbl.replace tbl k v) entries; + Hashtbl.fold (fun k v acc -> (k, v) :: acc) tbl [] + +let count_adds_removes entries = + List.fold_left + (fun (adds, removes) (_, v) -> + match v with + | Some _ -> (adds + 1, removes) + | None -> (adds, removes + 1)) + (0, 0) entries + +(** {1 Statistics} *) + +type stats = { + (* Input tracking *) + mutable deltas_received: int; + (** Number of delta messages (Set/Remove/Batch) *) + mutable entries_received: int; (** Total entries after expanding batches *) + mutable adds_received: int; (** Set operations received from upstream *) + mutable removes_received: int; + (** Remove operations received from upstream *) + (* Processing tracking *) + mutable process_count: int; (** Times process() was called *) + mutable process_time_ns: int64; (** Total time in process() *) + (* Output tracking *) + mutable deltas_emitted: int; (** Number of delta messages emitted *) + mutable entries_emitted: int; (** Total entries in emitted deltas *) + mutable adds_emitted: int; (** Set operations emitted downstream *) + mutable removes_emitted: int; (** Remove operations emitted downstream *) +} + +let create_stats () = + { + deltas_received = 0; + entries_received = 0; + adds_received = 0; + removes_received = 0; + process_count = 0; + process_time_ns = 0L; + deltas_emitted = 0; + entries_emitted = 0; + adds_emitted = 0; + removes_emitted = 0; + } + +(** Count adds and removes in a list of entries *) +let count_changes entries = + let adds = ref 0 in + let removes = ref 0 in + List.iter + (fun (_, v_opt) -> + match v_opt with + | Some _ -> incr adds + | None -> incr removes) + entries; + (!adds, !removes) + +(** {1 Node Registry} *) + +module Registry = struct + type node_info = { + 
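(** Usage sketch for the delta helpers defined above, written from a client's
    perspective (keys and values are illustrative):
    {[
      (* In a Batch, Some means set and None means remove. *)
      let d =
        Reactive.Batch
          [Reactive.set "a" 1; Reactive.remove "b"; Reactive.set "a" 3]
      in
      (* delta_to_entries expands the batch to
         [("a", Some 1); ("b", None); ("a", Some 3)]; when a node processes
         the batch, entries are deduplicated per key and later entries win,
         so "a" ends up at 3. *)
      ignore (Reactive.delta_to_entries d)
    ]} *)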
name: string; + level: int; + mutable upstream: string list; + mutable downstream: string list; + mutable dirty: bool; + process: unit -> unit; (* Process accumulated deltas *) + stats: stats; + } + + let nodes : (string, node_info) Hashtbl.t = Hashtbl.create 64 + let edges : (string * string, string) Hashtbl.t = Hashtbl.create 128 + + (* Combinator nodes: (combinator_id, (shape, inputs, output)) *) + let combinators : (string, string * string list * string) Hashtbl.t = + Hashtbl.create 32 + let dirty_nodes : string list ref = ref [] + + let register ~name ~level ~process ~stats = + let info = + { + name; + level; + upstream = []; + downstream = []; + dirty = false; + process; + stats; + } + in + Hashtbl.replace nodes name info; + info + + let add_edge ~from_name ~to_name ~label = + Hashtbl.replace edges (from_name, to_name) label; + (match Hashtbl.find_opt nodes from_name with + | Some info -> info.downstream <- to_name :: info.downstream + | None -> ()); + match Hashtbl.find_opt nodes to_name with + | Some info -> info.upstream <- from_name :: info.upstream + | None -> () + + (** Register a multi-input combinator (rendered as diamond in Mermaid) *) + let add_combinator ~name ~shape ~inputs ~output = + Hashtbl.replace combinators name (shape, inputs, output) + + let mark_dirty name = + match Hashtbl.find_opt nodes name with + | Some info when not info.dirty -> + info.dirty <- true; + dirty_nodes := name :: !dirty_nodes + | _ -> () + + let clear () = + Hashtbl.clear nodes; + Hashtbl.clear edges; + Hashtbl.clear combinators; + dirty_nodes := [] + + let reset_stats () = + Hashtbl.iter + (fun _ info -> + info.stats.deltas_received <- 0; + info.stats.entries_received <- 0; + info.stats.adds_received <- 0; + info.stats.removes_received <- 0; + info.stats.process_count <- 0; + info.stats.process_time_ns <- 0L; + info.stats.deltas_emitted <- 0; + info.stats.entries_emitted <- 0; + info.stats.adds_emitted <- 0; + info.stats.removes_emitted <- 0) + nodes + + (** Generate Mermaid diagram of the pipeline *) + let to_mermaid () = + let buf = Buffer.create 256 in + Buffer.add_string buf "graph TD\n"; + (* Collect edges that are part of combinators *) + let combinator_edges = Hashtbl.create 64 in + Hashtbl.iter + (fun comb_name (_, inputs, output) -> + List.iter + (fun input -> + Hashtbl.replace combinator_edges (input, output) comb_name) + inputs) + combinators; + (* Output regular nodes *) + Hashtbl.iter + (fun name _info -> + Buffer.add_string buf (Printf.sprintf " %s[%s]\n" name name)) + nodes; + (* Output combinator nodes (diamond shape) with classes *) + let join_nodes = ref [] in + let union_nodes = ref [] in + let fixpoint_nodes = ref [] in + Hashtbl.iter + (fun comb_name (shape, _inputs, _output) -> + Buffer.add_string buf (Printf.sprintf " %s{%s}\n" comb_name shape); + match shape with + | "join" -> join_nodes := comb_name :: !join_nodes + | "union" -> union_nodes := comb_name :: !union_nodes + | "fixpoint" -> fixpoint_nodes := comb_name :: !fixpoint_nodes + | _ -> ()) + combinators; + (* Output edges *) + Hashtbl.iter + (fun name info -> + List.iter + (fun downstream -> + (* Check if this edge is part of a combinator *) + match Hashtbl.find_opt combinator_edges (name, downstream) with + | Some comb_name -> + (* Edge goes to combinator node instead *) + Buffer.add_string buf + (Printf.sprintf " %s --> %s\n" name comb_name) + | None -> + let label = + match Hashtbl.find_opt edges (name, downstream) with + | Some l -> l + | None -> "" + in + if label = "" then + Buffer.add_string buf + 
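(** Sketch of how the registry's diagram output can be consumed from client
    code (node names are illustrative):
    {[
      let files, _emit = Reactive.source ~name:"files" () in
      let _decls =
        Reactive.flatMap ~name:"decls" files
          ~f:(fun path (n : int) -> [(path, n)])
          ()
      in
      (* Prints a "graph TD" Mermaid description with an edge from files
         to decls. *)
      print_string (Reactive.to_mermaid ())
    ]} *)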
(Printf.sprintf " %s --> %s\n" name downstream) + else + Buffer.add_string buf + (Printf.sprintf " %s -->|%s| %s\n" name label downstream)) + info.downstream) + nodes; + (* Output edges from combinators to their outputs *) + Hashtbl.iter + (fun comb_name (_shape, _inputs, output) -> + Buffer.add_string buf + (Printf.sprintf " %s --> %s\n" comb_name output)) + combinators; + (* Style definitions for combinator types *) + Buffer.add_string buf + "\n classDef joinClass fill:#e6f3ff,stroke:#0066cc\n"; + Buffer.add_string buf + " classDef unionClass fill:#fff0e6,stroke:#cc6600\n"; + Buffer.add_string buf + " classDef fixpointClass fill:#e6ffe6,stroke:#006600\n"; + (* Assign classes to combinator nodes *) + if !join_nodes <> [] then + Buffer.add_string buf + (Printf.sprintf " class %s joinClass\n" + (String.concat "," !join_nodes)); + if !union_nodes <> [] then + Buffer.add_string buf + (Printf.sprintf " class %s unionClass\n" + (String.concat "," !union_nodes)); + if !fixpoint_nodes <> [] then + Buffer.add_string buf + (Printf.sprintf " class %s fixpointClass\n" + (String.concat "," !fixpoint_nodes)); + Buffer.contents buf + + (** Print timing stats for all nodes *) + let print_stats () = + let all = Hashtbl.fold (fun _ info acc -> info :: acc) nodes [] in + let sorted = List.sort (fun a b -> compare a.level b.level) all in + Printf.eprintf "Node statistics:\n"; + Printf.eprintf " %-30s | %8s %8s %5s %5s | %8s %8s %5s %5s | %5s %8s\n" + "name" "d_recv" "e_recv" "+in" "-in" "d_emit" "e_emit" "+out" "-out" + "runs" "time_ms"; + Printf.eprintf " %s\n" (String.make 115 '-'); + List.iter + (fun info -> + let s = info.stats in + let time_ms = Int64.to_float s.process_time_ns /. 1e6 in + Printf.eprintf + " %-30s | %8d %8d %5d %5d | %8d %8d %5d %5d | %5d %8.2f\n" + (Printf.sprintf "%s (L%d)" info.name info.level) + s.deltas_received s.entries_received s.adds_received + s.removes_received s.deltas_emitted s.entries_emitted s.adds_emitted + s.removes_emitted s.process_count time_ms) + sorted +end + +(** {1 Scheduler} *) + +module Scheduler = struct + let propagating = ref false + let wave_counter = ref 0 + + let is_propagating () = !propagating + + (** Process all dirty nodes in level order *) + let propagate () = + if !propagating then + failwith "Scheduler.propagate: already propagating (nested call)" + else ( + propagating := true; + incr wave_counter; + + while !Registry.dirty_nodes <> [] do + (* Get all dirty nodes, sort by level *) + let dirty = !Registry.dirty_nodes in + Registry.dirty_nodes := []; + + let nodes_with_levels = + dirty + |> List.filter_map (fun name -> + match Hashtbl.find_opt Registry.nodes name with + | Some info -> Some (info.Registry.level, name, info) + | None -> None) + in + + let sorted = + List.sort + (fun (l1, _, _) (l2, _, _) -> compare l1 l2) + nodes_with_levels + in + + (* Find minimum level *) + match sorted with + | [] -> () + | (min_level, _, _) :: _ -> + (* Process all nodes at minimum level *) + let at_level, rest = + List.partition (fun (l, _, _) -> l = min_level) sorted + in + + (* Put remaining back in dirty list *) + List.iter + (fun (_, name, _) -> + Registry.dirty_nodes := name :: !Registry.dirty_nodes) + rest; + + (* Process nodes at this level *) + List.iter + (fun (_, _, info) -> + info.Registry.dirty <- false; + let start = Sys.time () in + info.Registry.process (); + let elapsed = Sys.time () -. start in + info.Registry.stats.process_time_ns <- + Int64.add info.Registry.stats.process_time_ns + (Int64.of_float (elapsed *. 
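(** Sketch of what the level-ordered scheduling looks like from the outside:
    in a diamond (one source feeding two maps combined by a union), a single
    emit runs exactly one wave, and the union sees both branches together
    rather than one after the other (illustrative values):
    {[
      let src, emit = Reactive.source ~name:"src" () in
      let incs =
        Reactive.flatMap ~name:"incs" src ~f:(fun k v -> [(k, v + 1)]) ()
      in
      let dbls =
        Reactive.flatMap ~name:"dbls" src ~f:(fun k v -> [(k, v * 2)]) ()
      in
      let both = Reactive.union ~name:"both" incs dbls ~merge:( + ) () in
      Reactive.Scheduler.reset_wave_count ();
      emit (Reactive.Set ("x", 10));
      assert (Reactive.Scheduler.wave_count () = 1);
      assert (Reactive.get both "x" = Some 31) (* (10 + 1) + (10 * 2) *)
    ]} *)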
1e9)); + info.Registry.stats.process_count <- + info.Registry.stats.process_count + 1) + at_level + done; + + propagating := false) + + let wave_count () = !wave_counter + let reset_wave_count () = wave_counter := 0 +end + +(** {1 Collection Interface} *) + +type ('k, 'v) t = { + name: string; + subscribe: (('k, 'v) delta -> unit) -> unit; + iter: ('k -> 'v -> unit) -> unit; + get: 'k -> 'v option; + length: unit -> int; + stats: stats; + level: int; +} + +let iter f t = t.iter f +let get t k = t.get k +let length t = t.length () +let stats t = t.stats +let level t = t.level +let name t = t.name + +(** {1 Source Collection} *) + +let source ~name () = + let tbl = Hashtbl.create 64 in + let subscribers = ref [] in + let my_stats = create_stats () in + + (* Pending deltas to propagate *) + let pending = ref [] in + + let process () = + if !pending <> [] then ( + let entries = + !pending |> List.concat_map delta_to_entries |> merge_entries + in + pending := []; + if entries <> [] then ( + let num_adds, num_removes = count_changes entries in + my_stats.deltas_emitted <- my_stats.deltas_emitted + 1; + my_stats.entries_emitted <- + my_stats.entries_emitted + List.length entries; + my_stats.adds_emitted <- my_stats.adds_emitted + num_adds; + my_stats.removes_emitted <- my_stats.removes_emitted + num_removes; + let delta = Batch entries in + List.iter (fun h -> h delta) !subscribers)) + in + + let _info = Registry.register ~name ~level:0 ~process ~stats:my_stats in + + let collection = + { + name; + subscribe = (fun h -> subscribers := h :: !subscribers); + iter = (fun f -> Hashtbl.iter f tbl); + get = (fun k -> Hashtbl.find_opt tbl k); + length = (fun () -> Hashtbl.length tbl); + stats = my_stats; + level = 0; + } + in + + let emit delta = + (* Track input *) + my_stats.deltas_received <- my_stats.deltas_received + 1; + let entries = delta_to_entries delta in + my_stats.entries_received <- my_stats.entries_received + List.length entries; + let num_adds, num_removes = count_adds_removes entries in + my_stats.adds_received <- my_stats.adds_received + num_adds; + my_stats.removes_received <- my_stats.removes_received + num_removes; + + (* Apply to internal state immediately *) + (match delta with + | Set (k, v) -> Hashtbl.replace tbl k v + | Remove k -> Hashtbl.remove tbl k + | Batch entries -> + List.iter + (fun (k, v_opt) -> + match v_opt with + | Some v -> Hashtbl.replace tbl k v + | None -> Hashtbl.remove tbl k) + entries); + (* Accumulate for propagation *) + pending := delta :: !pending; + Registry.mark_dirty name; + (* If not in propagation, start one *) + if not (Scheduler.is_propagating ()) then Scheduler.propagate () + in + + (collection, emit) + +(** {1 FlatMap} *) + +let flatMap ~name (src : ('k1, 'v1) t) ~f ?merge () : ('k2, 'v2) t = + let my_level = src.level + 1 in + let merge_fn = + match merge with + | Some m -> m + | None -> fun _ v -> v + in + + (* Internal state *) + let provenance : ('k1, 'k2 list) Hashtbl.t = Hashtbl.create 64 in + let contributions : ('k2, ('k1, 'v2) Hashtbl.t) Hashtbl.t = + Hashtbl.create 256 + in + let target : ('k2, 'v2) Hashtbl.t = Hashtbl.create 256 in + let subscribers = ref [] in + let my_stats = create_stats () in + + (* Pending input deltas *) + let pending = ref [] in + + let recompute_target k2 = + match Hashtbl.find_opt contributions k2 with + | None -> + Hashtbl.remove target k2; + Some (k2, None) + | Some contribs when Hashtbl.length contribs = 0 -> + Hashtbl.remove contributions k2; + Hashtbl.remove target k2; + Some (k2, None) + | Some contribs 
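(** Minimal usage sketch for [source] (defined above); keys and values are
    illustrative:
    {[
      let users, emit = Reactive.source ~name:"users" () in
      emit (Reactive.Set ("alice", 1));
      emit (Reactive.Batch [Reactive.set "bob" 2; Reactive.remove "alice"]);
      assert (Reactive.get users "alice" = None);
      assert (Reactive.get users "bob" = Some 2);
      assert (Reactive.length users = 1)
    ]} *)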
-> + let values = Hashtbl.fold (fun _ v acc -> v :: acc) contribs [] in + let merged = + match values with + | [] -> assert false + | [v] -> v + | v :: rest -> List.fold_left merge_fn v rest + in + Hashtbl.replace target k2 merged; + Some (k2, Some merged) + in + + let remove_source k1 = + match Hashtbl.find_opt provenance k1 with + | None -> [] + | Some target_keys -> + Hashtbl.remove provenance k1; + List.iter + (fun k2 -> + match Hashtbl.find_opt contributions k2 with + | None -> () + | Some contribs -> Hashtbl.remove contribs k1) + target_keys; + target_keys + in + + let add_source k1 entries = + let target_keys = List.map fst entries in + Hashtbl.replace provenance k1 target_keys; + List.iter + (fun (k2, v2) -> + let contribs = + match Hashtbl.find_opt contributions k2 with + | Some c -> c + | None -> + let c = Hashtbl.create 4 in + Hashtbl.replace contributions k2 c; + c + in + Hashtbl.replace contribs k1 v2) + entries; + target_keys + in + + let process_entry (k1, v1_opt) = + let old_affected = remove_source k1 in + let new_affected = + match v1_opt with + | None -> [] + | Some v1 -> + let entries = f k1 v1 in + add_source k1 entries + in + let all_affected = old_affected @ new_affected in + (* Deduplicate *) + let seen = Hashtbl.create (List.length all_affected) in + List.filter_map + (fun k2 -> + if Hashtbl.mem seen k2 then None + else ( + Hashtbl.replace seen k2 (); + recompute_target k2)) + all_affected + in + + let process () = + if !pending <> [] then ( + (* Track input deltas *) + my_stats.deltas_received <- + my_stats.deltas_received + List.length !pending; + let entries = + !pending |> List.concat_map delta_to_entries |> merge_entries + in + pending := []; + my_stats.entries_received <- + my_stats.entries_received + List.length entries; + let in_adds, in_removes = count_adds_removes entries in + my_stats.adds_received <- my_stats.adds_received + in_adds; + my_stats.removes_received <- my_stats.removes_received + in_removes; + + let output_entries = entries |> List.concat_map process_entry in + if output_entries <> [] then ( + let num_adds, num_removes = count_changes output_entries in + my_stats.deltas_emitted <- my_stats.deltas_emitted + 1; + my_stats.entries_emitted <- + my_stats.entries_emitted + List.length output_entries; + my_stats.adds_emitted <- my_stats.adds_emitted + num_adds; + my_stats.removes_emitted <- my_stats.removes_emitted + num_removes; + let delta = Batch output_entries in + List.iter (fun h -> h delta) !subscribers)) + in + + let _info = + Registry.register ~name ~level:my_level ~process ~stats:my_stats + in + Registry.add_edge ~from_name:src.name ~to_name:name ~label:"flatMap"; + + (* Subscribe to source: just accumulate *) + src.subscribe (fun delta -> + pending := delta :: !pending; + Registry.mark_dirty name); + + (* Initialize from existing data *) + src.iter (fun k v -> + let entries = f k v in + let _ = add_source k entries in + List.iter + (fun (k2, v2) -> + let contribs = + match Hashtbl.find_opt contributions k2 with + | Some c -> c + | None -> + let c = Hashtbl.create 4 in + Hashtbl.replace contributions k2 c; + c + in + Hashtbl.replace contribs k v2; + Hashtbl.replace target k2 v2) + entries); + + { + name; + subscribe = (fun h -> subscribers := h :: !subscribers); + iter = (fun f -> Hashtbl.iter f target); + get = (fun k -> Hashtbl.find_opt target k); + length = (fun () -> Hashtbl.length target); + stats = my_stats; + level = my_level; + } + +(** {1 Join} *) + +let join ~name (left : ('k1, 'v1) t) (right : ('k2, 'v2) t) ~key_of ~f ?merge () 
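(** Sketch of [flatMap] (defined above) with a merge function, where several
    source entries contribute to the same output key (illustrative data):
    {[
      let files, emit = Reactive.source ~name:"files" () in
      let word_counts =
        Reactive.flatMap ~name:"word_counts" files
          ~f:(fun _path words -> List.map (fun w -> (w, 1)) words)
          ~merge:( + ) ()
      in
      emit (Reactive.Set ("a.txt", ["dead"; "code"]));
      emit (Reactive.Set ("b.txt", ["dead"]));
      (* "dead" occurs in both files; the two contributions are merged. *)
      assert (Reactive.get word_counts "dead" = Some 2);
      emit (Reactive.Remove "b.txt");
      assert (Reactive.get word_counts "dead" = Some 1)
    ]} *)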
+ : ('k3, 'v3) t = + let my_level = max left.level right.level + 1 in + let merge_fn = + match merge with + | Some m -> m + | None -> fun _ v -> v + in + + (* Internal state *) + let left_entries : ('k1, 'v1) Hashtbl.t = Hashtbl.create 64 in + let provenance : ('k1, 'k3 list) Hashtbl.t = Hashtbl.create 64 in + let contributions : ('k3, ('k1, 'v3) Hashtbl.t) Hashtbl.t = + Hashtbl.create 256 + in + let target : ('k3, 'v3) Hashtbl.t = Hashtbl.create 256 in + let left_to_right_key : ('k1, 'k2) Hashtbl.t = Hashtbl.create 64 in + let right_key_to_left_keys : ('k2, 'k1 list) Hashtbl.t = Hashtbl.create 64 in + let subscribers = ref [] in + let my_stats = create_stats () in + + (* Separate pending buffers for left and right *) + let left_pending = ref [] in + let right_pending = ref [] in + + let recompute_target k3 = + match Hashtbl.find_opt contributions k3 with + | None -> + Hashtbl.remove target k3; + Some (k3, None) + | Some contribs when Hashtbl.length contribs = 0 -> + Hashtbl.remove contributions k3; + Hashtbl.remove target k3; + Some (k3, None) + | Some contribs -> + let values = Hashtbl.fold (fun _ v acc -> v :: acc) contribs [] in + let merged = + match values with + | [] -> assert false + | [v] -> v + | v :: rest -> List.fold_left merge_fn v rest + in + Hashtbl.replace target k3 merged; + Some (k3, Some merged) + in + + let remove_left_contributions k1 = + match Hashtbl.find_opt provenance k1 with + | None -> [] + | Some target_keys -> + Hashtbl.remove provenance k1; + List.iter + (fun k3 -> + match Hashtbl.find_opt contributions k3 with + | None -> () + | Some contribs -> Hashtbl.remove contribs k1) + target_keys; + target_keys + in + + let add_left_contributions k1 entries = + let target_keys = List.map fst entries in + Hashtbl.replace provenance k1 target_keys; + List.iter + (fun (k3, v3) -> + let contribs = + match Hashtbl.find_opt contributions k3 with + | Some c -> c + | None -> + let c = Hashtbl.create 4 in + Hashtbl.replace contributions k3 c; + c + in + Hashtbl.replace contribs k1 v3) + entries; + target_keys + in + + let process_left_entry k1 v1 = + let old_affected = remove_left_contributions k1 in + (* Update right key tracking *) + (match Hashtbl.find_opt left_to_right_key k1 with + | Some old_k2 -> ( + Hashtbl.remove left_to_right_key k1; + match Hashtbl.find_opt right_key_to_left_keys old_k2 with + | Some keys -> + Hashtbl.replace right_key_to_left_keys old_k2 + (List.filter (fun k -> k <> k1) keys) + | None -> ()) + | None -> ()); + let k2 = key_of k1 v1 in + Hashtbl.replace left_to_right_key k1 k2; + let keys = + match Hashtbl.find_opt right_key_to_left_keys k2 with + | Some ks -> ks + | None -> [] + in + Hashtbl.replace right_key_to_left_keys k2 (k1 :: keys); + (* Compute output *) + let right_val = right.get k2 in + let new_entries = f k1 v1 right_val in + let new_affected = add_left_contributions k1 new_entries in + old_affected @ new_affected + in + + let remove_left_entry k1 = + Hashtbl.remove left_entries k1; + let affected = remove_left_contributions k1 in + (match Hashtbl.find_opt left_to_right_key k1 with + | Some k2 -> ( + Hashtbl.remove left_to_right_key k1; + match Hashtbl.find_opt right_key_to_left_keys k2 with + | Some keys -> + Hashtbl.replace right_key_to_left_keys k2 + (List.filter (fun k -> k <> k1) keys) + | None -> ()) + | None -> ()); + affected + in + + let process () = + (* Track input deltas *) + my_stats.deltas_received <- + my_stats.deltas_received + List.length !left_pending + + List.length !right_pending; + + (* Process both left and right 
pending *) + let left_entries_list = + !left_pending |> List.concat_map delta_to_entries |> merge_entries + in + let right_entries_list = + !right_pending |> List.concat_map delta_to_entries |> merge_entries + in + left_pending := []; + right_pending := []; + + my_stats.entries_received <- + my_stats.entries_received + + List.length left_entries_list + + List.length right_entries_list; + let left_adds, left_removes = count_adds_removes left_entries_list in + let right_adds, right_removes = count_adds_removes right_entries_list in + my_stats.adds_received <- my_stats.adds_received + left_adds + right_adds; + my_stats.removes_received <- + my_stats.removes_received + left_removes + right_removes; + + let all_affected = ref [] in + + (* Process left entries *) + List.iter + (fun (k1, v1_opt) -> + match v1_opt with + | Some v1 -> + Hashtbl.replace left_entries k1 v1; + let affected = process_left_entry k1 v1 in + all_affected := affected @ !all_affected + | None -> + let affected = remove_left_entry k1 in + all_affected := affected @ !all_affected) + left_entries_list; + + (* Process right entries: reprocess affected left entries *) + List.iter + (fun (k2, _) -> + match Hashtbl.find_opt right_key_to_left_keys k2 with + | None -> () + | Some left_keys -> + List.iter + (fun k1 -> + match Hashtbl.find_opt left_entries k1 with + | Some v1 -> + let affected = process_left_entry k1 v1 in + all_affected := affected @ !all_affected + | None -> ()) + left_keys) + right_entries_list; + + (* Deduplicate and compute outputs *) + let seen = Hashtbl.create (List.length !all_affected) in + let output_entries = + !all_affected + |> List.filter_map (fun k3 -> + if Hashtbl.mem seen k3 then None + else ( + Hashtbl.replace seen k3 (); + recompute_target k3)) + in + + if output_entries <> [] then ( + let num_adds, num_removes = count_changes output_entries in + my_stats.deltas_emitted <- my_stats.deltas_emitted + 1; + my_stats.entries_emitted <- + my_stats.entries_emitted + List.length output_entries; + my_stats.adds_emitted <- my_stats.adds_emitted + num_adds; + my_stats.removes_emitted <- my_stats.removes_emitted + num_removes; + let delta = Batch output_entries in + List.iter (fun h -> h delta) !subscribers) + in + + let _info = + Registry.register ~name ~level:my_level ~process ~stats:my_stats + in + Registry.add_edge ~from_name:left.name ~to_name:name ~label:"join"; + Registry.add_edge ~from_name:right.name ~to_name:name ~label:"join"; + Registry.add_combinator ~name:(name ^ "_join") ~shape:"join" + ~inputs:[left.name; right.name] ~output:name; + + (* Subscribe to sources: just accumulate *) + left.subscribe (fun delta -> + left_pending := delta :: !left_pending; + Registry.mark_dirty name); + + right.subscribe (fun delta -> + right_pending := delta :: !right_pending; + Registry.mark_dirty name); + + (* Initialize from existing data *) + left.iter (fun k1 v1 -> + Hashtbl.replace left_entries k1 v1; + let _ = process_left_entry k1 v1 in + ()); + + { + name; + subscribe = (fun h -> subscribers := h :: !subscribers); + iter = (fun f -> Hashtbl.iter f target); + get = (fun k -> Hashtbl.find_opt target k); + length = (fun () -> Hashtbl.length target); + stats = my_stats; + level = my_level; + } + +(** {1 Union} *) + +let union ~name (left : ('k, 'v) t) (right : ('k, 'v) t) ?merge () : ('k, 'v) t + = + let my_level = max left.level right.level + 1 in + let merge_fn = + match merge with + | Some m -> m + | None -> fun _ v -> v + in + + (* Internal state *) + let left_values : ('k, 'v) Hashtbl.t = Hashtbl.create 64 
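(** Sketch of [join] (defined above): every left entry is looked up in the
    right collection using the key computed by [key_of], and re-evaluated when
    that right-hand entry changes (illustrative reference/declaration data):
    {[
      let refs, emit_refs = Reactive.source ~name:"refs" () in
      let decls, emit_decls = Reactive.source ~name:"decls" () in
      let resolved =
        Reactive.join ~name:"resolved" refs decls
          ~key_of:(fun _ref_id target -> target)
          ~f:(fun ref_id target decl_opt ->
            [(ref_id, (target, decl_opt <> None))])
          ()
      in
      emit_refs (Reactive.Set ("r1", "f"));
      (* No declaration for "f" yet: the lookup sees None. *)
      assert (Reactive.get resolved "r1" = Some ("f", false));
      emit_decls (Reactive.Set ("f", ()));
      (* The right-side update re-runs the affected left entry. *)
      assert (Reactive.get resolved "r1" = Some ("f", true))
    ]} *)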
in + let right_values : ('k, 'v) Hashtbl.t = Hashtbl.create 64 in + let target : ('k, 'v) Hashtbl.t = Hashtbl.create 128 in + let subscribers = ref [] in + let my_stats = create_stats () in + + (* Separate pending buffers *) + let left_pending = ref [] in + let right_pending = ref [] in + + let recompute_target k = + match (Hashtbl.find_opt left_values k, Hashtbl.find_opt right_values k) with + | None, None -> + Hashtbl.remove target k; + Some (k, None) + | Some v, None | None, Some v -> + Hashtbl.replace target k v; + Some (k, Some v) + | Some lv, Some rv -> + let merged = merge_fn lv rv in + Hashtbl.replace target k merged; + Some (k, Some merged) + in + + let process () = + (* Track input deltas *) + my_stats.deltas_received <- + my_stats.deltas_received + List.length !left_pending + + List.length !right_pending; + + let left_entries = + !left_pending |> List.concat_map delta_to_entries |> merge_entries + in + let right_entries = + !right_pending |> List.concat_map delta_to_entries |> merge_entries + in + left_pending := []; + right_pending := []; + + my_stats.entries_received <- + my_stats.entries_received + List.length left_entries + + List.length right_entries; + let left_adds, left_removes = count_adds_removes left_entries in + let right_adds, right_removes = count_adds_removes right_entries in + my_stats.adds_received <- my_stats.adds_received + left_adds + right_adds; + my_stats.removes_received <- + my_stats.removes_received + left_removes + right_removes; + + let all_affected = ref [] in + + (* Apply left entries *) + List.iter + (fun (k, v_opt) -> + (match v_opt with + | Some v -> Hashtbl.replace left_values k v + | None -> Hashtbl.remove left_values k); + all_affected := k :: !all_affected) + left_entries; + + (* Apply right entries *) + List.iter + (fun (k, v_opt) -> + (match v_opt with + | Some v -> Hashtbl.replace right_values k v + | None -> Hashtbl.remove right_values k); + all_affected := k :: !all_affected) + right_entries; + + (* Deduplicate and compute outputs *) + let seen = Hashtbl.create (List.length !all_affected) in + let output_entries = + !all_affected + |> List.filter_map (fun k -> + if Hashtbl.mem seen k then None + else ( + Hashtbl.replace seen k (); + recompute_target k)) + in + + if output_entries <> [] then ( + let num_adds, num_removes = count_changes output_entries in + my_stats.deltas_emitted <- my_stats.deltas_emitted + 1; + my_stats.entries_emitted <- + my_stats.entries_emitted + List.length output_entries; + my_stats.adds_emitted <- my_stats.adds_emitted + num_adds; + my_stats.removes_emitted <- my_stats.removes_emitted + num_removes; + let delta = Batch output_entries in + List.iter (fun h -> h delta) !subscribers) + in + + let _info = + Registry.register ~name ~level:my_level ~process ~stats:my_stats + in + Registry.add_edge ~from_name:left.name ~to_name:name ~label:"union"; + Registry.add_edge ~from_name:right.name ~to_name:name ~label:"union"; + Registry.add_combinator ~name:(name ^ "_union") ~shape:"union" + ~inputs:[left.name; right.name] ~output:name; + + (* Subscribe to sources: just accumulate *) + left.subscribe (fun delta -> + left_pending := delta :: !left_pending; + Registry.mark_dirty name); + + right.subscribe (fun delta -> + right_pending := delta :: !right_pending; + Registry.mark_dirty name); + + (* Initialize from existing data - process left then right *) + left.iter (fun k v -> + Hashtbl.replace left_values k v; + let merged = merge_fn v v in + (* self-merge for single value *) + Hashtbl.replace target k merged); + right.iter 
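(** Sketch of [union] with a merge function for keys present on both sides
    (illustrative data):
    {[
      let manual, emit_manual = Reactive.source ~name:"manual" () in
      let inferred, emit_inferred = Reactive.source ~name:"inferred" () in
      let combined =
        Reactive.union ~name:"combined" manual inferred ~merge:max ()
      in
      emit_manual (Reactive.Set ("x", 1));
      emit_inferred (Reactive.Set ("x", 5));
      emit_inferred (Reactive.Set ("y", 2));
      assert (Reactive.get combined "x" = Some 5);
      assert (Reactive.get combined "y" = Some 2);
      emit_inferred (Reactive.Remove "x");
      (* "x" is still present on the left-hand side. *)
      assert (Reactive.get combined "x" = Some 1)
    ]} *)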
(fun k v -> + Hashtbl.replace right_values k v; + (* Right takes precedence, but merge if left exists *) + let merged = + match Hashtbl.find_opt left_values k with + | Some lv -> merge_fn lv v + | None -> v + in + Hashtbl.replace target k merged); + + { + name; + subscribe = (fun h -> subscribers := h :: !subscribers); + iter = (fun f -> Hashtbl.iter f target); + get = (fun k -> Hashtbl.find_opt target k); + length = (fun () -> Hashtbl.length target); + stats = my_stats; + level = my_level; + } + +(** {1 Fixpoint} *) + +let fixpoint ~name ~(init : ('k, unit) t) ~(edges : ('k, 'k list) t) () : + ('k, unit) t = + let my_level = max init.level edges.level + 1 in + + (* Internal state *) + let current : ('k, unit) Hashtbl.t = Hashtbl.create 256 in + let edge_map : ('k, 'k list) Hashtbl.t = Hashtbl.create 256 in + let subscribers = ref [] in + let my_stats = create_stats () in + + (* Separate pending buffers *) + let init_pending = ref [] in + let edges_pending = ref [] in + + (* Track which nodes are roots *) + let roots : ('k, unit) Hashtbl.t = Hashtbl.create 64 in + + (* BFS helper to find all reachable from roots *) + let recompute_all () = + let new_current = Hashtbl.create (Hashtbl.length current) in + let frontier = Queue.create () in + + (* Start from all roots *) + Hashtbl.iter + (fun k () -> + Hashtbl.replace new_current k (); + Queue.add k frontier) + roots; + + (* BFS *) + while not (Queue.is_empty frontier) do + let k = Queue.pop frontier in + match Hashtbl.find_opt edge_map k with + | None -> () + | Some successors -> + List.iter + (fun succ -> + if not (Hashtbl.mem new_current succ) then ( + Hashtbl.replace new_current succ (); + Queue.add succ frontier)) + successors + done; + new_current + in + + let process () = + (* Track input deltas *) + my_stats.deltas_received <- + my_stats.deltas_received + List.length !init_pending + + List.length !edges_pending; + + let init_entries = + !init_pending |> List.concat_map delta_to_entries |> merge_entries + in + let edges_entries = + !edges_pending |> List.concat_map delta_to_entries |> merge_entries + in + init_pending := []; + edges_pending := []; + + my_stats.entries_received <- + my_stats.entries_received + List.length init_entries + + List.length edges_entries; + let init_adds, init_removes = count_adds_removes init_entries in + let edges_adds, edges_removes = count_adds_removes edges_entries in + my_stats.adds_received <- my_stats.adds_received + init_adds + edges_adds; + my_stats.removes_received <- + my_stats.removes_received + init_removes + edges_removes; + + let output_entries = ref [] in + let needs_full_recompute = ref false in + + (* Apply edge updates *) + List.iter + (fun (k, v_opt) -> + match v_opt with + | Some successors -> + let old = Hashtbl.find_opt edge_map k in + Hashtbl.replace edge_map k successors; + (* If edges changed for a current node, may need recompute *) + if Hashtbl.mem current k && old <> Some successors then + needs_full_recompute := true + | None -> + if Hashtbl.mem edge_map k then ( + Hashtbl.remove edge_map k; + if Hashtbl.mem current k then needs_full_recompute := true)) + edges_entries; + + (* Apply init updates *) + List.iter + (fun (k, v_opt) -> + match v_opt with + | Some () -> Hashtbl.replace roots k () + | None -> + if Hashtbl.mem roots k then ( + Hashtbl.remove roots k; + needs_full_recompute := true)) + init_entries; + + (* Either do incremental expansion or full recompute *) + (if !needs_full_recompute then ( + (* Full recompute: find what changed *) + let new_current = recompute_all () in 
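(** Sketch of [fixpoint] from a client's point of view: adding roots or edges
    grows the reachable set incrementally, while removals fall back to the
    full recomputation performed below (illustrative call graph):
    {[
      let roots, emit_roots = Reactive.source ~name:"roots" () in
      let graph, emit_graph = Reactive.source ~name:"graph" () in
      let live = Reactive.fixpoint ~name:"live" ~init:roots ~edges:graph () in
      emit_graph (Reactive.Set ("main", ["helper"]));
      emit_graph (Reactive.Set ("helper", ["util"]));
      emit_roots (Reactive.Set ("main", ()));
      assert (Reactive.length live = 3);
      (* Dropping main -> helper makes helper and util unreachable again. *)
      emit_graph (Reactive.Set ("main", []));
      assert (Reactive.length live = 1)
    ]} *)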
+ + (* Find removed entries *) + Hashtbl.iter + (fun k () -> + if not (Hashtbl.mem new_current k) then + output_entries := (k, None) :: !output_entries) + current; + + (* Find added entries *) + Hashtbl.iter + (fun k () -> + if not (Hashtbl.mem current k) then + output_entries := (k, Some ()) :: !output_entries) + new_current; + + (* Update current *) + Hashtbl.reset current; + Hashtbl.iter (fun k v -> Hashtbl.replace current k v) new_current) + else + (* Incremental: BFS from new roots *) + let frontier = Queue.create () in + + init_entries + |> List.iter (fun (k, v_opt) -> + match v_opt with + | Some () when not (Hashtbl.mem current k) -> + Hashtbl.replace current k (); + output_entries := (k, Some ()) :: !output_entries; + Queue.add k frontier + | _ -> ()); + + while not (Queue.is_empty frontier) do + let k = Queue.pop frontier in + match Hashtbl.find_opt edge_map k with + | None -> () + | Some successors -> + List.iter + (fun succ -> + if not (Hashtbl.mem current succ) then ( + Hashtbl.replace current succ (); + output_entries := (succ, Some ()) :: !output_entries; + Queue.add succ frontier)) + successors + done); + + if !output_entries <> [] then ( + let num_adds, num_removes = count_changes !output_entries in + my_stats.deltas_emitted <- my_stats.deltas_emitted + 1; + my_stats.entries_emitted <- + my_stats.entries_emitted + List.length !output_entries; + my_stats.adds_emitted <- my_stats.adds_emitted + num_adds; + my_stats.removes_emitted <- my_stats.removes_emitted + num_removes; + let delta = Batch !output_entries in + List.iter (fun h -> h delta) !subscribers) + in + + let _info = + Registry.register ~name ~level:my_level ~process ~stats:my_stats + in + Registry.add_edge ~from_name:init.name ~to_name:name ~label:"roots"; + Registry.add_edge ~from_name:edges.name ~to_name:name ~label:"edges"; + Registry.add_combinator ~name:(name ^ "_fp") ~shape:"fixpoint" + ~inputs:[init.name; edges.name] ~output:name; + + (* Subscribe to sources: just accumulate *) + init.subscribe (fun delta -> + init_pending := delta :: !init_pending; + Registry.mark_dirty name); + + edges.subscribe (fun delta -> + edges_pending := delta :: !edges_pending; + Registry.mark_dirty name); + + (* Initialize from existing data *) + (* First, copy edges *) + edges.iter (fun k v -> Hashtbl.replace edge_map k v); + (* Then, BFS from existing init values *) + let frontier = Queue.create () in + init.iter (fun k () -> + Hashtbl.replace roots k (); + (* Track roots *) + if not (Hashtbl.mem current k) then ( + Hashtbl.replace current k (); + Queue.add k frontier)); + while not (Queue.is_empty frontier) do + let k = Queue.pop frontier in + match Hashtbl.find_opt edge_map k with + | None -> () + | Some successors -> + List.iter + (fun succ -> + if not (Hashtbl.mem current succ) then ( + Hashtbl.replace current succ (); + Queue.add succ frontier)) + successors + done; + + { + name; + subscribe = (fun h -> subscribers := h :: !subscribers); + iter = (fun f -> Hashtbl.iter f current); + get = (fun k -> Hashtbl.find_opt current k); + length = (fun () -> Hashtbl.length current); + stats = my_stats; + level = my_level; + } + +(** {1 Utilities} *) + +let to_mermaid () = Registry.to_mermaid () +let print_stats () = Registry.print_stats () +let reset () = Registry.clear () +let reset_stats () = Registry.reset_stats () diff --git a/analysis/reactive/src/Reactive.mli b/analysis/reactive/src/Reactive.mli new file mode 100644 index 0000000000..8964baed03 --- /dev/null +++ b/analysis/reactive/src/Reactive.mli @@ -0,0 +1,169 @@ +(** 
Reactive V2: Accumulate-then-propagate scheduler for glitch-free semantics. + + Key design: + 1. Nodes accumulate batch deltas (don't process immediately) + 2. Scheduler visits nodes in dependency order + 3. Each node processes accumulated deltas exactly once per wave + + This eliminates glitches from multi-level dependencies by construction. *) + +(** {1 Deltas} *) + +type ('k, 'v) delta = + | Set of 'k * 'v + | Remove of 'k + | Batch of ('k * 'v option) list + (** Batch of updates: (key, Some value) = set, (key, None) = remove *) + +val set : 'k -> 'v -> 'k * 'v option +(** Create a batch entry that sets a key *) + +val remove : 'k -> 'k * 'v option +(** Create a batch entry that removes a key *) + +val delta_to_entries : ('k, 'v) delta -> ('k * 'v option) list +(** Convert delta to batch entries *) + +(** {1 Statistics} *) + +type stats = { + (* Input tracking *) + mutable deltas_received: int; + (** Number of delta messages (Set/Remove/Batch) *) + mutable entries_received: int; (** Total entries after expanding batches *) + mutable adds_received: int; (** Set operations received from upstream *) + mutable removes_received: int; + (** Remove operations received from upstream *) + (* Processing tracking *) + mutable process_count: int; (** Times process() was called *) + mutable process_time_ns: int64; (** Total time in process() *) + (* Output tracking *) + mutable deltas_emitted: int; (** Number of delta messages emitted *) + mutable entries_emitted: int; (** Total entries in emitted deltas *) + mutable adds_emitted: int; (** Set operations emitted downstream *) + mutable removes_emitted: int; (** Remove operations emitted downstream *) +} +(** Per-node statistics for diagnostics *) + +val create_stats : unit -> stats + +(** {1 Node Registry} *) + +module Registry : sig + type node_info + (** Information about a registered node *) + + val clear : unit -> unit + (** Clear all registered nodes *) + + val to_mermaid : unit -> string + (** Generate a Mermaid diagram of the pipeline *) + + val print_stats : unit -> unit + (** Print timing statistics for all nodes *) +end + +(** {1 Scheduler} *) + +module Scheduler : sig + val propagate : unit -> unit + (** Process all dirty nodes in topological order. + Called automatically when a source emits. *) + + val is_propagating : unit -> bool + (** Returns true if currently in a propagation wave *) + + val wave_count : unit -> int + (** Number of propagation waves executed *) + + val reset_wave_count : unit -> unit + (** Reset the wave counter *) +end + +(** {1 Collection Interface} *) + +type ('k, 'v) t = { + name: string; + subscribe: (('k, 'v) delta -> unit) -> unit; + iter: ('k -> 'v -> unit) -> unit; + get: 'k -> 'v option; + length: unit -> int; + stats: stats; + level: int; +} +(** A named reactive collection at a specific topological level *) + +val iter : ('k -> 'v -> unit) -> ('k, 'v) t -> unit +val get : ('k, 'v) t -> 'k -> 'v option +val length : ('k, 'v) t -> int +val stats : ('k, 'v) t -> stats +val level : ('k, 'v) t -> int +val name : ('k, 'v) t -> string + +(** {1 Source Collection} *) + +val source : name:string -> unit -> ('k, 'v) t * (('k, 'v) delta -> unit) +(** Create a named source collection. + Returns the collection and an emit function. + Emitting triggers propagation through the pipeline. *) + +(** {1 Combinators} *) + +val flatMap : + name:string -> + ('k1, 'v1) t -> + f:('k1 -> 'v1 -> ('k2 * 'v2) list) -> + ?merge:('v2 -> 'v2 -> 'v2) -> + unit -> + ('k2, 'v2) t +(** Transform each entry into zero or more output entries. 
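    For example, expanding per-file data into one entry per declaration (an
    illustrative sketch):
    {[
      let files, emit = Reactive.source ~name:"files" () in
      let decls =
        Reactive.flatMap ~name:"decls" files
          ~f:(fun path names -> List.map (fun n -> ((path, n), ())) names)
          ()
      in
      emit (Reactive.Set ("a.res", ["x"; "y"]));
      assert (Reactive.get decls ("a.res", "x") = Some ())
    ]}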
+ Optional merge function combines values for the same output key. *) + +val join : + name:string -> + ('k1, 'v1) t -> + ('k2, 'v2) t -> + key_of:('k1 -> 'v1 -> 'k2) -> + f:('k1 -> 'v1 -> 'v2 option -> ('k3 * 'v3) list) -> + ?merge:('v3 -> 'v3 -> 'v3) -> + unit -> + ('k3, 'v3) t +(** Join left collection with right collection. + For each left entry, looks up the key in right. + Separate left/right pending buffers ensure glitch-freedom. *) + +val union : + name:string -> + ('k, 'v) t -> + ('k, 'v) t -> + ?merge:('v -> 'v -> 'v) -> + unit -> + ('k, 'v) t +(** Combine two collections. + Optional merge function combines values for the same key. + Separate left/right pending buffers ensure glitch-freedom. *) + +val fixpoint : + name:string -> + init:('k, unit) t -> + edges:('k, 'k list) t -> + unit -> + ('k, unit) t +(** Compute transitive closure. + init: initial roots + edges: k -> successors + Returns: all reachable keys from roots *) + +(** {1 Utilities} *) + +val to_mermaid : unit -> string +(** Generate Mermaid diagram of the pipeline *) + +val print_stats : unit -> unit +(** Print per-node timing statistics *) + +val reset : unit -> unit +(** Clear all registered nodes (for tests) *) + +val reset_stats : unit -> unit +(** Reset all node statistics to zero (keeps nodes intact) *) diff --git a/analysis/reactive/src/ReactiveFileCollection.ml b/analysis/reactive/src/ReactiveFileCollection.ml new file mode 100644 index 0000000000..bcae68a0b7 --- /dev/null +++ b/analysis/reactive/src/ReactiveFileCollection.ml @@ -0,0 +1,111 @@ +(** Reactive File Collection + + Creates a reactive collection from files with automatic change detection. *) + +type file_id = {mtime: float; size: int; ino: int} +(** File identity for change detection *) + +let get_file_id path : file_id = + let st = Unix.stat path in + {mtime = st.Unix.st_mtime; size = st.Unix.st_size; ino = st.Unix.st_ino} + +let file_changed ~old_id ~new_id = + old_id.mtime <> new_id.mtime + || old_id.size <> new_id.size || old_id.ino <> new_id.ino + +type ('raw, 'v) internal = { + cache: (string, file_id * 'v) Hashtbl.t; + read_file: string -> 'raw; + process: string -> 'raw -> 'v; (* path -> raw -> value *) +} +(** Internal state for file collection *) + +type ('raw, 'v) t = { + internal: ('raw, 'v) internal; + collection: (string, 'v) Reactive.t; + emit: (string, 'v) Reactive.delta -> unit; +} +(** A file collection is just a Reactive.t with some extra operations *) + +(** Create a new reactive file collection *) +let create ~read_file ~process : ('raw, 'v) t = + let internal = {cache = Hashtbl.create 256; read_file; process} in + let collection, emit = Reactive.source ~name:"file_collection" () in + {internal; collection; emit} + +(** Get the collection interface for composition *) +let to_collection t : (string, 'v) Reactive.t = t.collection + +(** Emit a delta *) +let emit t delta = t.emit delta + +(** Process a file if changed. Emits delta to subscribers. 
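    An illustrative sketch (assumes [a.res] exists on disk; the reader and
    processing functions are placeholders):
    {[
      let files =
        ReactiveFileCollection.create
          ~read_file:(fun _path -> ())
          ~process:(fun path () -> path)
      in
      let first = ReactiveFileCollection.process_if_changed files "a.res" in
      let again = ReactiveFileCollection.process_if_changed files "a.res" in
      (* The first call reads, caches and emits; the second sees the same
         mtime/size/inode and does nothing. *)
      assert (first && not again)
    ]}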
*) +let process_if_changed t path = + let new_id = get_file_id path in + match Hashtbl.find_opt t.internal.cache path with + | Some (old_id, _) when not (file_changed ~old_id ~new_id) -> + false (* unchanged *) + | _ -> + let raw = t.internal.read_file path in + let value = t.internal.process path raw in + Hashtbl.replace t.internal.cache path (new_id, value); + emit t (Reactive.Set (path, value)); + true (* changed *) + +(** Process multiple files (emits individual deltas) *) +let process_files t paths = + List.iter (fun path -> ignore (process_if_changed t path)) paths + +(** Process a file without emitting. Returns batch entry if changed. *) +let process_file_silent t path = + let new_id = get_file_id path in + match Hashtbl.find_opt t.internal.cache path with + | Some (old_id, _) when not (file_changed ~old_id ~new_id) -> + None (* unchanged *) + | _ -> + let raw = t.internal.read_file path in + let value = t.internal.process path raw in + Hashtbl.replace t.internal.cache path (new_id, value); + Some (Reactive.set path value) + +(** Process multiple files and emit as a single batch. + More efficient than process_files when processing many files at once. *) +let process_files_batch t paths = + let entries = + paths |> List.filter_map (fun path -> process_file_silent t path) + in + if entries <> [] then emit t (Reactive.Batch entries); + List.length entries + +(** Remove a file *) +let remove t path = + Hashtbl.remove t.internal.cache path; + emit t (Reactive.Remove path) + +(** Remove multiple files as a batch *) +let remove_batch t paths = + let entries = + paths + |> List.filter_map (fun path -> + if Hashtbl.mem t.internal.cache path then ( + Hashtbl.remove t.internal.cache path; + Some (path, None)) + else None) + in + if entries <> [] then emit t (Reactive.Batch entries); + List.length entries + +(** Clear all cached data *) +let clear t = Hashtbl.clear t.internal.cache + +(** Invalidate a path *) +let invalidate t path = Hashtbl.remove t.internal.cache path + +let get t path = + match Hashtbl.find_opt t.internal.cache path with + | Some (_, v) -> Some v + | None -> None + +let mem t path = Hashtbl.mem t.internal.cache path +let length t = Reactive.length t.collection +let iter f t = Reactive.iter f t.collection diff --git a/analysis/reactive/src/ReactiveFileCollection.mli b/analysis/reactive/src/ReactiveFileCollection.mli new file mode 100644 index 0000000000..e50c661828 --- /dev/null +++ b/analysis/reactive/src/ReactiveFileCollection.mli @@ -0,0 +1,71 @@ +(** Reactive File Collection + + Creates a reactive collection from files with automatic change detection. + + {2 Example} + + {[ + (* Create file collection *) + let files = ReactiveFileCollection.create + ~read_file:Cmt_format.read_cmt + ~process:(fun path cmt -> extract_data path cmt) + + (* Compose with flatMap *) + let decls = Reactive.flatMap ~name:"decls" (ReactiveFileCollection.to_collection files) + ~f:(fun _path data -> data.decls) + () + + (* Process files - decls updates automatically *) + ReactiveFileCollection.process_files files [file_a; file_b]; + + (* Read results *) + Reactive.iter (fun pos decl -> ...) decls + ]} *) + +type ('raw, 'v) t +(** A file collection. ['raw] is the raw file type, ['v] is the processed value. *) + +(** {1 Creation} *) + +val create : + read_file:(string -> 'raw) -> process:(string -> 'raw -> 'v) -> ('raw, 'v) t +(** Create a new file collection. + [process path raw] receives the file path and raw content to produce the value. 
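    A minimal sketch with a plain-text reader (both functions here are
    placeholders, not part of this library):
    {[
      let files =
        ReactiveFileCollection.create
          ~read_file:(fun path ->
            let ic = open_in_bin path in
            let contents = really_input_string ic (in_channel_length ic) in
            close_in ic;
            contents)
          ~process:(fun _path contents -> String.length contents)
    ]}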
*) + +(** {1 Composition} *) + +val to_collection : ('raw, 'v) t -> (string, 'v) Reactive.t +(** Get the reactive collection interface for use with [Reactive.flatMap]. *) + +(** {1 Processing} *) + +val process_files : ('raw, 'v) t -> string list -> unit +(** Process files, emitting individual deltas for each changed file. *) + +val process_files_batch : ('raw, 'v) t -> string list -> int +(** Process files, emitting a single [Batch] delta with all changes. + Returns the number of files that changed. + More efficient than [process_files] when processing many files at once, + as downstream combinators can process all changes together. *) + +val process_if_changed : ('raw, 'v) t -> string -> bool +(** Process a file if changed. Returns true if file was processed. *) + +val remove : ('raw, 'v) t -> string -> unit +(** Remove a file from the collection. *) + +val remove_batch : ('raw, 'v) t -> string list -> int +(** Remove multiple files as a batch. Returns the number of files removed. + More efficient than calling [remove] multiple times. *) + +(** {1 Cache Management} *) + +val invalidate : ('raw, 'v) t -> string -> unit +val clear : ('raw, 'v) t -> unit + +(** {1 Access} *) + +val get : ('raw, 'v) t -> string -> 'v option +val mem : ('raw, 'v) t -> string -> bool +val length : ('raw, 'v) t -> int +val iter : (string -> 'v -> unit) -> ('raw, 'v) t -> unit diff --git a/analysis/reactive/src/dune b/analysis/reactive/src/dune new file mode 100644 index 0000000000..4fb933961f --- /dev/null +++ b/analysis/reactive/src/dune @@ -0,0 +1,4 @@ +(library + (name reactive) + (wrapped false) + (libraries unix)) diff --git a/analysis/reactive/test/BatchTest.ml b/analysis/reactive/test/BatchTest.ml new file mode 100644 index 0000000000..4c750d16cf --- /dev/null +++ b/analysis/reactive/test/BatchTest.ml @@ -0,0 +1,87 @@ +(** Batch processing tests *) + +open Reactive +open TestHelpers + +let test_batch_flatmap () = + reset (); + Printf.printf "=== Test: batch flatmap ===\n"; + + let source, emit = source ~name:"source" () in + let derived = + flatMap ~name:"derived" source ~f:(fun k v -> [(k ^ "_derived", v * 2)]) () + in + + (* Subscribe to track what comes out *) + let received_batches = ref 0 in + let received_entries = ref [] in + subscribe + (function + | Batch entries -> + incr received_batches; + received_entries := entries @ !received_entries + | Set (k, v) -> received_entries := [(k, Some v)] @ !received_entries + | Remove k -> received_entries := [(k, None)] @ !received_entries) + derived; + + (* Send a batch *) + emit_batch [set "a" 1; set "b" 2; set "c" 3] emit; + + Printf.printf "Received batches: %d, entries: %d\n" !received_batches + (List.length !received_entries); + assert (!received_batches = 1); + assert (List.length !received_entries = 3); + assert (get derived "a_derived" = Some 2); + assert (get derived "b_derived" = Some 4); + assert (get derived "c_derived" = Some 6); + + Printf.printf "PASSED\n\n" + +let test_batch_fixpoint () = + reset (); + Printf.printf "=== Test: batch fixpoint ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + (* Track batches received *) + let batch_count = ref 0 in + let total_added = ref 0 in + subscribe + (function + | Batch entries -> + incr batch_count; + entries + |> List.iter (fun (_, v_opt) -> + match v_opt with + | Some () -> incr total_added + | None -> ()) + | Set (_, ()) -> incr total_added + | Remove _ -> ()) + fp; + + (* Set up 
edges first *) + emit_edges (Set ("a", ["b"; "c"])); + emit_edges (Set ("b", ["d"])); + + (* Send batch of roots *) + emit_batch [set "a" (); set "x" ()] emit_init; + + Printf.printf "Batch count: %d, total added: %d\n" !batch_count !total_added; + Printf.printf "fp length: %d\n" (length fp); + (* Should have a, b, c, d (reachable from a) and x (standalone root) *) + assert (length fp = 5); + assert (get fp "a" = Some ()); + assert (get fp "b" = Some ()); + assert (get fp "c" = Some ()); + assert (get fp "d" = Some ()); + assert (get fp "x" = Some ()); + + Printf.printf "PASSED\n\n" + +let run_all () = + Printf.printf "\n====== Batch Tests ======\n\n"; + test_batch_flatmap (); + test_batch_fixpoint () diff --git a/analysis/reactive/test/FixpointBasicTest.ml b/analysis/reactive/test/FixpointBasicTest.ml new file mode 100644 index 0000000000..b978ea9468 --- /dev/null +++ b/analysis/reactive/test/FixpointBasicTest.ml @@ -0,0 +1,212 @@ +(** Basic fixpoint graph traversal tests *) + +open Reactive + +let test_fixpoint () = + reset (); + Printf.printf "Test: fixpoint\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Set up graph: 1 -> [2, 3], 2 -> [4], 3 -> [4] *) + emit_edges (Set (1, [2; 3])); + emit_edges (Set (2, [4])); + emit_edges (Set (3, [4])); + + (* Compute fixpoint *) + let reachable = fixpoint ~name:"reachable" ~init ~edges () in + + (* Initially empty *) + Printf.printf "Initially: length=%d\n" (length reachable); + assert (length reachable = 0); + + (* Add root 1 *) + emit_init (Set (1, ())); + Printf.printf "After adding root 1: length=%d\n" (length reachable); + assert (length reachable = 4); + (* 1, 2, 3, 4 *) + assert (get reachable 1 = Some ()); + assert (get reachable 2 = Some ()); + assert (get reachable 3 = Some ()); + assert (get reachable 4 = Some ()); + assert (get reachable 5 = None); + + (* Add another root 5 with edge 5 -> [6] *) + emit_edges (Set (5, [6])); + emit_init (Set (5, ())); + Printf.printf "After adding root 5: length=%d\n" (length reachable); + assert (length reachable = 6); + + (* 1, 2, 3, 4, 5, 6 *) + + (* Remove root 1 *) + emit_init (Remove 1); + Printf.printf "After removing root 1: length=%d\n" (length reachable); + assert (length reachable = 2); + (* 5, 6 *) + assert (get reachable 1 = None); + assert (get reachable 5 = Some ()); + assert (get reachable 6 = Some ()); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_basic_expansion () = + reset (); + Printf.printf "=== Test: fixpoint basic expansion ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> b -> c *) + emit_edges (Set ("a", ["b"])); + emit_edges (Set ("b", ["c"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + + assert (length fp = 3); + assert (get fp "a" = Some ()); + assert (get fp "b" = Some ()); + assert (get fp "c" = Some ()); + assert (get fp "d" = None); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_multiple_roots () = + reset (); + Printf.printf "=== Test: fixpoint multiple roots ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> b, c -> d (disconnected components) *) + emit_edges (Set ("a", ["b"])); + emit_edges (Set ("c", ["d"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + emit_init (Set ("c", ())); + + assert (length fp = 4); + assert (get fp "a" = Some ()); + assert 
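(** The batch and incremental tests open [TestHelpers] for [subscribe] and
    [emit_batch], which are not part of this diff. A plausible sketch of those
    helpers (an assumption, not the actual file):
    {[
      (* TestHelpers.ml (hypothetical) *)
      let subscribe handler c = c.Reactive.subscribe handler
      let emit_batch entries emit = emit (Reactive.Batch entries)
    ]} *)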
(get fp "b" = Some ()); + assert (get fp "c" = Some ()); + assert (get fp "d" = Some ()); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_diamond () = + reset (); + Printf.printf "=== Test: fixpoint diamond ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> b, a -> c, b -> d, c -> d *) + emit_edges (Set ("a", ["b"; "c"])); + emit_edges (Set ("b", ["d"])); + emit_edges (Set ("c", ["d"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + + assert (length fp = 4); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_cycle () = + reset (); + Printf.printf "=== Test: fixpoint cycle ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> b -> c -> b (cycle from root) *) + emit_edges (Set ("a", ["b"])); + emit_edges (Set ("b", ["c"])); + emit_edges (Set ("c", ["b"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + + assert (length fp = 3); + assert (get fp "a" = Some ()); + assert (get fp "b" = Some ()); + assert (get fp "c" = Some ()); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_empty_base () = + reset (); + Printf.printf "=== Test: fixpoint empty base ===\n"; + + let init, _emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + emit_edges (Set ("a", ["b"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + assert (length fp = 0); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_self_loop () = + reset (); + Printf.printf "=== Test: fixpoint self loop ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> a (self loop) *) + emit_edges (Set ("a", ["a"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + + assert (length fp = 1); + assert (get fp "a" = Some ()); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_existing_data () = + reset (); + Printf.printf "=== Test: fixpoint with existing data ===\n"; + + (* Create source and pre-populate *) + let init, emit_init = source ~name:"init" () in + emit_init (Set ("root", ())); + + let edges, emit_edges = source ~name:"edges" () in + emit_edges (Set ("root", ["a"; "b"])); + emit_edges (Set ("a", ["c"])); + + (* Create fixpoint - should immediately have all reachable *) + let fp = fixpoint ~name:"fp" ~init ~edges () in + + Printf.printf "Fixpoint length: %d (expected 4)\n" (length fp); + assert (length fp = 4); + (* root, a, b, c *) + assert (get fp "root" = Some ()); + assert (get fp "a" = Some ()); + assert (get fp "b" = Some ()); + assert (get fp "c" = Some ()); + + Printf.printf "PASSED\n\n" + +let run_all () = + Printf.printf "\n====== Fixpoint Basic Tests ======\n\n"; + test_fixpoint (); + test_fixpoint_basic_expansion (); + test_fixpoint_multiple_roots (); + test_fixpoint_diamond (); + test_fixpoint_cycle (); + test_fixpoint_empty_base (); + test_fixpoint_self_loop (); + test_fixpoint_existing_data () diff --git a/analysis/reactive/test/FixpointIncrementalTest.ml b/analysis/reactive/test/FixpointIncrementalTest.ml new file mode 100644 index 0000000000..e7fb6c086e --- /dev/null +++ b/analysis/reactive/test/FixpointIncrementalTest.ml @@ -0,0 +1,690 @@ +(** Incremental fixpoint update tests (add/remove base and edges) *) + +open Reactive +open TestHelpers + +let test_fixpoint_add_base () = + reset (); + Printf.printf "=== Test: fixpoint add base 
===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> b, c -> d *) + emit_edges (Set ("a", ["b"])); + emit_edges (Set ("c", ["d"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + assert (length fp = 2); + + (* a, b *) + + (* Track changes via subscription *) + let added = ref [] in + let removed = ref [] in + subscribe + (function + | Set (k, ()) -> added := k :: !added + | Remove k -> removed := k :: !removed + | Batch entries -> + entries + |> List.iter (fun (k, v_opt) -> + match v_opt with + | Some () -> added := k :: !added + | None -> removed := k :: !removed)) + fp; + + emit_init (Set ("c", ())); + + Printf.printf "Added: [%s]\n" (String.concat ", " !added); + assert (List.length !added = 2); + (* c, d *) + assert (List.mem "c" !added); + assert (List.mem "d" !added); + assert (!removed = []); + assert (length fp = 4); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_remove_base () = + reset (); + Printf.printf "=== Test: fixpoint remove base ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> b -> c *) + emit_edges (Set ("a", ["b"])); + emit_edges (Set ("b", ["c"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + assert (length fp = 3); + + let removed = ref [] in + subscribe + (function + | Remove k -> removed := k :: !removed + | Batch entries -> + List.iter + (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed) + entries + | _ -> ()) + fp; + + emit_init (Remove "a"); + + Printf.printf "Removed: [%s]\n" (String.concat ", " !removed); + assert (List.length !removed = 3); + assert (length fp = 0); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_add_edge () = + reset (); + Printf.printf "=== Test: fixpoint add edge ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + assert (length fp = 1); + + (* just a *) + let added = ref [] in + subscribe + (function + | Set (k, ()) -> added := k :: !added + | Batch entries -> + List.iter + (fun (k, v_opt) -> if v_opt = Some () then added := k :: !added) + entries + | _ -> ()) + fp; + + (* Add edge a -> b *) + emit_edges (Set ("a", ["b"])); + + Printf.printf "Added: [%s]\n" (String.concat ", " !added); + assert (List.mem "b" !added); + assert (length fp = 2); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_remove_edge () = + reset (); + Printf.printf "=== Test: fixpoint remove edge ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> b -> c *) + emit_edges (Set ("a", ["b"])); + emit_edges (Set ("b", ["c"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + assert (length fp = 3); + + let removed = ref [] in + subscribe + (function + | Remove k -> removed := k :: !removed + | Batch entries -> + List.iter + (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed) + entries + | _ -> ()) + fp; + + (* Remove edge a -> b *) + emit_edges (Set ("a", [])); + + Printf.printf "Removed: [%s]\n" (String.concat ", " !removed); + assert (List.length !removed = 2); + (* b, c *) + assert (length fp = 1); + + (* just a *) + Printf.printf "PASSED\n\n" + +let test_fixpoint_cycle_removal () = + reset (); + Printf.printf "=== Test: fixpoint cycle 
removal (well-founded) ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> b -> c -> b (b-c cycle reachable from a) *) + emit_edges (Set ("a", ["b"])); + emit_edges (Set ("b", ["c"])); + emit_edges (Set ("c", ["b"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + assert (length fp = 3); + + let removed = ref [] in + subscribe + (function + | Remove k -> removed := k :: !removed + | Batch entries -> + List.iter + (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed) + entries + | _ -> ()) + fp; + + (* Remove edge a -> b *) + emit_edges (Set ("a", [])); + + Printf.printf "Removed: [%s]\n" (String.concat ", " !removed); + (* Both b and c should be removed - cycle has no well-founded support *) + assert (List.length !removed = 2); + assert (List.mem "b" !removed); + assert (List.mem "c" !removed); + assert (length fp = 1); + + (* just a *) + Printf.printf "PASSED\n\n" + +let test_fixpoint_alternative_support () = + reset (); + Printf.printf "=== Test: fixpoint alternative support ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Graph: a -> b, a -> c -> b + If we remove a -> b, b should survive via a -> c -> b *) + emit_edges (Set ("a", ["b"; "c"])); + emit_edges (Set ("c", ["b"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("a", ())); + assert (length fp = 3); + + let removed = ref [] in + subscribe + (function + | Remove k -> removed := k :: !removed + | Batch entries -> + List.iter + (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed) + entries + | _ -> ()) + fp; + + (* Remove direct edge a -> b (but keep a -> c) *) + emit_edges (Set ("a", ["c"])); + + Printf.printf "Removed: [%s]\n" (String.concat ", " !removed); + (* b should NOT be removed - still reachable via c *) + assert (!removed = []); + assert (length fp = 3); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_deltas () = + reset (); + Printf.printf "=== Test: fixpoint delta emissions ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + emit_edges (Set (1, [2; 3])); + emit_edges (Set (2, [4])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + (* Count entries, not deltas - V2 emits batches *) + let all_entries = ref [] in + subscribe + (function + | Set (k, v) -> all_entries := (k, Some v) :: !all_entries + | Remove k -> all_entries := (k, None) :: !all_entries + | Batch entries -> all_entries := entries @ !all_entries) + fp; + + (* Add root *) + emit_init (Set (1, ())); + Printf.printf "After add root: %d entries\n" (List.length !all_entries); + assert (List.length !all_entries = 4); + + (* 1, 2, 3, 4 *) + all_entries := []; + + (* Add edge 3 -> 5 *) + emit_edges (Set (3, [5])); + Printf.printf "After add edge 3->5: %d entries\n" (List.length !all_entries); + assert (List.length !all_entries = 1); + + (* 5 added *) + all_entries := []; + + (* Remove root (should remove all) *) + emit_init (Remove 1); + Printf.printf "After remove root: %d entries\n" (List.length !all_entries); + assert (List.length !all_entries = 5); + + (* 1, 2, 3, 4, 5 removed *) + Printf.printf "PASSED\n\n" + +(* Test: Remove from init but still reachable via edges *) +let test_fixpoint_remove_spurious_root () = + reset (); + Printf.printf + "=== Test: fixpoint remove spurious root (still reachable) ===\n"; + + let init, emit_init = source 
~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + (* Track all deltas *) + let added = ref [] in + let removed = ref [] in + subscribe + (function + | Set (k, ()) -> added := k :: !added + | Remove k -> removed := k :: !removed + | Batch entries -> + entries + |> List.iter (fun (k, v_opt) -> + match v_opt with + | Some () -> added := k :: !added + | None -> removed := k :: !removed)) + fp; + + (* Step 1: "b" is spuriously marked as a root *) + emit_init (Set ("b", ())); + Printf.printf "After spurious root b: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + assert (get fp "b" = Some ()); + + (* Step 2: The real root "root" is added *) + emit_init (Set ("root", ())); + Printf.printf "After true root: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + + (* Step 3: Edge root -> a is added *) + emit_edges (Set ("root", ["a"])); + Printf.printf "After edge root->a: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + assert (get fp "a" = Some ()); + + (* Step 4: Edge a -> b is added *) + emit_edges (Set ("a", ["b"])); + Printf.printf "After edge a->b: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + + assert (length fp = 3); + + added := []; + removed := []; + + (* Step 5: The spurious root "b" is REMOVED from init *) + emit_init (Remove "b"); + + Printf.printf "After removing b from init: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + Printf.printf "Removed: [%s]\n" (String.concat ", " !removed); + + (* b should NOT be removed - still reachable via a *) + assert (not (List.mem "b" !removed)); + assert (get fp "b" = Some ()); + assert (length fp = 3); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_remove_edge_entry_alternative_source () = + reset (); + Printf.printf + "=== Test: fixpoint remove edge entry (alternative source) ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Set up initial edges: a -> b, c -> b *) + emit_edges (Set ("a", ["b"])); + emit_edges (Set ("c", ["b"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + (* Track changes *) + let removed = ref [] in + subscribe + (function + | Remove k -> removed := k :: !removed + | Batch entries -> + List.iter + (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed) + entries + | _ -> ()) + fp; + + (* Add roots a and c *) + emit_init (Set ("a", ())); + emit_init (Set ("c", ())); + + Printf.printf "Initial: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + + assert (length fp = 3); + + removed := []; + + (* Remove entire edge entry for "a" *) + emit_edges (Remove "a"); + + Printf.printf "After Remove edge entry 'a': fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + Printf.printf "Removed: [%s]\n" (String.concat ", " !removed); + + (* b should NOT be removed - still reachable via c -> b *) + assert (not (List.mem "b" !removed)); + assert (get fp "b" = 
Some ()); + assert (length fp = 3); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_remove_edge_rederivation () = + reset (); + Printf.printf "=== Test: fixpoint remove edge (re-derivation needed) ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + (* Track changes *) + let removed = ref [] in + let added = ref [] in + subscribe + (function + | Remove k -> removed := k :: !removed + | Set (k, ()) -> added := k :: !added + | Batch entries -> + entries + |> List.iter (fun (k, v_opt) -> + match v_opt with + | Some () -> added := k :: !added + | None -> removed := k :: !removed)) + fp; + + (* Add root *) + emit_init (Set ("root", ())); + + (* Build graph: root -> a -> b -> c, a -> c *) + emit_edges (Set ("root", ["a"])); + emit_edges (Set ("a", ["b"; "c"])); + emit_edges (Set ("b", ["c"])); + + Printf.printf "Initial: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + + assert (length fp = 4); + + removed := []; + added := []; + + (* Remove the direct edge a -> c *) + emit_edges (Set ("a", ["b"])); + + Printf.printf "After removing a->c: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + Printf.printf "Removed: [%s], Added: [%s]\n" + (String.concat ", " !removed) + (String.concat ", " !added); + + (* c should still be in fixpoint - reachable via root -> a -> b -> c *) + assert (get fp "c" = Some ()); + assert (length fp = 4); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_remove_edge_entry_rederivation () = + reset (); + Printf.printf "=== Test: fixpoint Remove edge entry (re-derivation) ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Set up edges before creating fixpoint *) + emit_edges (Set ("a", ["c"])); + emit_edges (Set ("b", ["c"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + (* Track changes *) + let removed = ref [] in + subscribe + (function + | Remove k -> removed := k :: !removed + | Batch entries -> + List.iter + (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed) + entries + | _ -> ()) + fp; + + (* Add roots a and b *) + emit_init (Set ("a", ())); + emit_init (Set ("b", ())); + + Printf.printf "Initial: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + + assert (length fp = 3); + + removed := []; + + (* Remove entire edge entry for "a" using Remove delta *) + emit_edges (Remove "a"); + + Printf.printf "After Remove 'a' entry: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + Printf.printf "Removed: [%s]\n" (String.concat ", " !removed); + + (* c should survive - b -> c still exists *) + assert (not (List.mem "c" !removed)); + assert (get fp "c" = Some ()); + assert (length fp = 3); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_remove_edge_entry_higher_rank_support () = + reset (); + Printf.printf "=== Test: fixpoint edge removal (higher rank support) ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + (* Track changes *) + let removed = ref [] in + let added = ref [] in + subscribe + 
(function + | Remove k -> removed := k :: !removed + | Set (k, ()) -> added := k :: !added + | Batch entries -> + entries + |> List.iter (fun (k, v_opt) -> + match v_opt with + | Some () -> added := k :: !added + | None -> removed := k :: !removed)) + fp; + + (* Add root *) + emit_init (Set ("root", ())); + + (* Build graph: root -> a -> b -> c, a -> c *) + emit_edges (Set ("root", ["a"])); + emit_edges (Set ("a", ["b"; "c"])); + emit_edges (Set ("b", ["c"])); + + Printf.printf "Initial: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + + assert (length fp = 4); + assert (get fp "c" = Some ()); + + removed := []; + added := []; + + (* Remove direct edge a -> c, keeping a -> b *) + emit_edges (Set ("a", ["b"])); + + Printf.printf "After removing a->c: fp=[%s]\n" + (let items = ref [] in + iter (fun k _ -> items := k :: !items) fp; + String.concat ", " (List.sort String.compare !items)); + Printf.printf "Removed: [%s], Added: [%s]\n" + (String.concat ", " !removed) + (String.concat ", " !added); + + (* c should still be in fixpoint via root -> a -> b -> c *) + assert (get fp "c" = Some ()); + assert (length fp = 4); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_remove_edge_entry_needs_rederivation () = + reset (); + Printf.printf + "=== Test: fixpoint Remove edge entry (needs re-derivation) ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Pre-populate edges so fixpoint initializes with them *) + emit_edges (Set ("r", ["a"; "b"])); + emit_edges (Set ("a", ["y"])); + emit_edges (Set ("b", ["c"])); + emit_edges (Set ("c", ["x"])); + emit_edges (Set ("x", ["y"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + (* Make r live *) + emit_init (Set ("r", ())); + + (* Sanity: y initially reachable via short path *) + assert (get fp "y" = Some ()); + assert (get fp "x" = Some ()); + + let removed = ref [] in + subscribe + (function + | Remove k -> removed := k :: !removed + | Batch entries -> + List.iter + (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed) + entries + | _ -> ()) + fp; + + (* Remove the entire edge entry for a (removes a->y) *) + emit_edges (Remove "a"); + + Printf.printf "Removed: [%s]\n" (String.concat ", " !removed); + + (* Correct: y is still reachable via r->b->c->x->y *) + assert (get fp "y" = Some ()); + + Printf.printf "PASSED\n\n" + +let test_fixpoint_remove_base_needs_rederivation () = + reset (); + Printf.printf + "=== Test: fixpoint Remove base element (needs re-derivation) ===\n"; + + let init, emit_init = source ~name:"init" () in + let edges, emit_edges = source ~name:"edges" () in + + (* Pre-populate edges so fixpoint initializes with them *) + emit_edges (Set ("r1", ["a"])); + emit_edges (Set ("a", ["y"])); + emit_edges (Set ("r2", ["b"])); + emit_edges (Set ("b", ["c"])); + emit_edges (Set ("c", ["x"])); + emit_edges (Set ("x", ["y"])); + + let fp = fixpoint ~name:"fp" ~init ~edges () in + + emit_init (Set ("r1", ())); + emit_init (Set ("r2", ())); + + (* Sanity: y initially reachable *) + assert (get fp "y" = Some ()); + assert (get fp "x" = Some ()); + + let removed = ref [] in + subscribe + (function + | Remove k -> removed := k :: !removed + | Batch entries -> + List.iter + (fun (k, v_opt) -> if v_opt = None then removed := k :: !removed) + entries + | _ -> ()) + fp; + + (* Remove r1 from base: y should remain via r2 path *) + emit_init (Remove "r1"); + + Printf.printf 
"Removed: [%s]\n" (String.concat ", " !removed); + + assert (get fp "y" = Some ()); + Printf.printf "PASSED\n\n" + +let run_all () = + Printf.printf "\n====== Fixpoint Incremental Tests ======\n\n"; + test_fixpoint_add_base (); + test_fixpoint_remove_base (); + test_fixpoint_add_edge (); + test_fixpoint_remove_edge (); + test_fixpoint_cycle_removal (); + test_fixpoint_alternative_support (); + test_fixpoint_deltas (); + test_fixpoint_remove_spurious_root (); + test_fixpoint_remove_edge_entry_alternative_source (); + test_fixpoint_remove_edge_rederivation (); + test_fixpoint_remove_edge_entry_rederivation (); + test_fixpoint_remove_edge_entry_higher_rank_support (); + test_fixpoint_remove_edge_entry_needs_rederivation (); + test_fixpoint_remove_base_needs_rederivation () diff --git a/analysis/reactive/test/FlatMapTest.ml b/analysis/reactive/test/FlatMapTest.ml new file mode 100644 index 0000000000..b9d2050469 --- /dev/null +++ b/analysis/reactive/test/FlatMapTest.ml @@ -0,0 +1,166 @@ +(** FlatMap combinator tests *) + +open Reactive +open TestHelpers + +let test_flatmap_basic () = + reset (); + Printf.printf "=== Test: flatMap basic ===\n"; + + (* Create a simple source collection *) + let source, emit = source ~name:"source" () in + + (* Create derived collection via flatMap *) + let derived = + flatMap ~name:"derived" source + ~f:(fun key value -> + [(key * 10, value); ((key * 10) + 1, value); ((key * 10) + 2, value)]) + () + in + + (* Add entry -> derived should have 3 entries *) + emit (Set (1, "a")); + Printf.printf "After Set(1, 'a'): derived has %d entries\n" (length derived); + assert (length derived = 3); + assert (get source 1 = Some "a"); + (* Check source was updated *) + assert (get derived 10 = Some "a"); + assert (get derived 11 = Some "a"); + assert (get derived 12 = Some "a"); + + (* Add another entry *) + emit (Set (2, "b")); + Printf.printf "After Set(2, 'b'): derived has %d entries\n" (length derived); + assert (length derived = 6); + + (* Update entry *) + emit (Set (1, "A")); + Printf.printf "After Set(1, 'A'): derived has %d entries\n" (length derived); + assert (get derived 10 = Some "A"); + assert (length derived = 6); + + (* Remove entry *) + emit (Remove 1); + Printf.printf "After Remove(1): derived has %d entries\n" (length derived); + assert (length derived = 3); + assert (get derived 10 = None); + assert (get derived 20 = Some "b"); + + Printf.printf "PASSED\n\n" + +let test_flatmap_with_merge () = + reset (); + Printf.printf "=== Test: flatMap with merge ===\n"; + + let source, emit = source ~name:"source" () in + + (* Create derived with merge *) + let derived = + flatMap ~name:"derived" source + ~f:(fun _key values -> [(0, values)]) (* all contribute to key 0 *) + ~merge:IntSet.union () + in + + (* Source 1 contributes {1, 2} *) + emit (Set (1, IntSet.of_list [1; 2])); + let v = get derived 0 |> Option.get in + Printf.printf "After source 1: {%s}\n" + (IntSet.elements v |> List.map string_of_int |> String.concat ", "); + assert (IntSet.equal v (IntSet.of_list [1; 2])); + + (* Source 2 contributes {3, 4} -> should merge *) + emit (Set (2, IntSet.of_list [3; 4])); + let v = get derived 0 |> Option.get in + Printf.printf "After source 2: {%s}\n" + (IntSet.elements v |> List.map string_of_int |> String.concat ", "); + assert (IntSet.equal v (IntSet.of_list [1; 2; 3; 4])); + + (* Remove source 1 *) + emit (Remove 1); + let v = get derived 0 |> Option.get in + Printf.printf "After remove 1: {%s}\n" + (IntSet.elements v |> List.map string_of_int |> String.concat ", 
"); + assert (IntSet.equal v (IntSet.of_list [3; 4])); + + Printf.printf "PASSED\n\n" + +let test_composition () = + reset (); + Printf.printf "=== Test: composition (flatMap chain) ===\n"; + + (* Source: file -> list of items *) + let source, emit = source ~name:"source" () in + + (* First flatMap: file -> items *) + let items = + flatMap ~name:"items" source + ~f:(fun path items -> + List.mapi (fun i item -> (Printf.sprintf "%s:%d" path i, item)) items) + () + in + + (* Second flatMap: item -> chars *) + let chars = + flatMap ~name:"chars" items + ~f:(fun key value -> + String.to_seq value + |> Seq.mapi (fun i c -> (Printf.sprintf "%s:%d" key i, c)) + |> List.of_seq) + () + in + + (* Add file with 2 items *) + emit (Set ("file1", ["ab"; "cd"])); + Printf.printf "After file1: items=%d, chars=%d\n" (length items) + (length chars); + assert (length items = 2); + assert (length chars = 4); + + (* Add another file *) + emit (Set ("file2", ["xyz"])); + Printf.printf "After file2: items=%d, chars=%d\n" (length items) + (length chars); + assert (length items = 3); + assert (length chars = 7); + + (* Update file1 *) + emit (Set ("file1", ["a"])); + Printf.printf "After update file1: items=%d, chars=%d\n" (length items) + (length chars); + assert (length items = 2); + (* 1 from file1 + 1 from file2 *) + assert (length chars = 4); + + (* 1 from file1 + 3 from file2 *) + Printf.printf "PASSED\n\n" + +let test_flatmap_on_existing_data () = + reset (); + Printf.printf "=== Test: flatMap on collection with existing data ===\n"; + + (* Create source and add data before creating flatMap *) + let source, emit = source ~name:"source" () in + emit (Set (1, "a")); + emit (Set (2, "b")); + + Printf.printf "Source has %d entries before flatMap\n" (length source); + + (* Create flatMap AFTER source has data *) + let derived = + flatMap ~name:"derived" source ~f:(fun k v -> [(k * 10, v)]) () + in + + (* Check derived has existing data *) + Printf.printf "Derived has %d entries (expected 2)\n" (length derived); + assert (length derived = 2); + assert (get derived 10 = Some "a"); + assert (get derived 20 = Some "b"); + + Printf.printf "PASSED\n\n" + +let run_all () = + Printf.printf "\n====== FlatMap Tests ======\n\n"; + test_flatmap_basic (); + test_flatmap_with_merge (); + test_composition (); + test_flatmap_on_existing_data () diff --git a/analysis/reactive/test/GlitchFreeTest.ml b/analysis/reactive/test/GlitchFreeTest.ml new file mode 100644 index 0000000000..3954075877 --- /dev/null +++ b/analysis/reactive/test/GlitchFreeTest.ml @@ -0,0 +1,289 @@ +(** Tests for glitch-free semantics with the accumulate-then-propagate scheduler *) + +open Reactive + +type file_data = {refs: (string * string) list; decl_positions: string list} +(** Type for file data *) + +type full_file_data = { + value_refs: (string * string) list; + exception_refs: (string * string) list; + full_decls: string list; +} +(** Type for full file data *) + +(** Track all deltas received *) +let track_deltas c = + let received = ref [] in + c.subscribe (fun d -> received := d :: !received); + received + +(** Count adds and removes *) +let count_delta = function + | Set _ -> (1, 0) + | Remove _ -> (0, 1) + | Batch entries -> + List.fold_left + (fun (a, r) (_, v_opt) -> + match v_opt with + | Some _ -> (a + 1, r) + | None -> (a, r + 1)) + (0, 0) entries + +let sum_deltas deltas = + List.fold_left + (fun (ta, tr) d -> + let a, r = count_delta d in + (ta + a, tr + r)) + (0, 0) deltas + +(** Test: Same source anti-join - no removals expected *) +let 
test_same_source_anti_join () = + reset (); + Printf.printf "=== Test: same source anti-join ===\n"; + + let src, emit = source ~name:"source" () in + + let refs = + flatMap ~name:"refs" src ~f:(fun _file (data : file_data) -> data.refs) () + in + + let decls = + flatMap ~name:"decls" src + ~f:(fun _file (data : file_data) -> + List.map (fun pos -> (pos, ())) data.decl_positions) + () + in + + let external_refs = + join ~name:"external_refs" refs decls + ~key_of:(fun posFrom _posTo -> posFrom) + ~f:(fun _posFrom posTo decl_opt -> + match decl_opt with + | Some () -> [] + | None -> [(posTo, ())]) + ~merge:(fun () () -> ()) + () + in + + let deltas = track_deltas external_refs in + + emit + (Batch + [ + set "file1" + {refs = [("A", "X"); ("B", "Y")]; decl_positions = ["A"; "B"]}; + set "file2" {refs = [("C", "Z")]; decl_positions = []}; + ]); + + let adds, removes = sum_deltas !deltas in + Printf.printf "adds=%d, removes=%d, len=%d\n" adds removes + (length external_refs); + + assert (removes = 0); + assert (length external_refs = 1); + Printf.printf "PASSED\n\n" + +(** Test: Multi-level union - the problematic case for glitch-free *) +let test_multi_level_union () = + reset (); + Printf.printf "=== Test: multi-level union ===\n"; + + let src, emit = source ~name:"source" () in + + (* refs1: level 1 *) + let refs1 = + flatMap ~name:"refs1" src + ~f:(fun _file (data : file_data) -> + List.filter (fun (k, _) -> String.length k > 0 && k.[0] = 'D') data.refs) + () + in + + (* intermediate: level 1 *) + let intermediate = + flatMap ~name:"intermediate" src + ~f:(fun _file (data : file_data) -> + List.filter (fun (k, _) -> String.length k > 0 && k.[0] = 'I') data.refs) + () + in + + (* refs2: level 2 *) + let refs2 = flatMap ~name:"refs2" intermediate ~f:(fun k v -> [(k, v)]) () in + + (* decls: level 1 *) + let decls = + flatMap ~name:"decls" src + ~f:(fun _file (data : file_data) -> + List.map (fun pos -> (pos, ())) data.decl_positions) + () + in + + (* all_refs: union at level 3 *) + let all_refs = union ~name:"all_refs" refs1 refs2 () in + + (* external_refs: join at level 4 *) + let external_refs = + join ~name:"external_refs" all_refs decls + ~key_of:(fun posFrom _posTo -> posFrom) + ~f:(fun _posFrom posTo decl_opt -> + match decl_opt with + | Some () -> [] + | None -> [(posTo, ())]) + ~merge:(fun () () -> ()) + () + in + + let deltas = track_deltas external_refs in + + emit + (Batch + [ + set "file1" {refs = [("D1", "X"); ("I1", "Y")]; decl_positions = ["D1"]}; + ]); + + let adds, removes = sum_deltas !deltas in + Printf.printf "adds=%d, removes=%d, len=%d\n" adds removes + (length external_refs); + + assert (removes = 0); + assert (length external_refs = 1); + Printf.printf "PASSED\n\n" + +(** Test: Real pipeline simulation - mimics ReactiveLiveness *) +let test_real_pipeline_simulation () = + reset (); + Printf.printf "=== Test: real pipeline simulation ===\n"; + + let src, emit = source ~name:"source" () in + + (* decls: level 1 *) + let decls = + flatMap ~name:"decls" src + ~f:(fun _file (data : full_file_data) -> + List.map (fun pos -> (pos, ())) data.full_decls) + () + in + + (* merged_value_refs: level 1 *) + let merged_value_refs = + flatMap ~name:"merged_value_refs" src + ~f:(fun _file (data : full_file_data) -> data.value_refs) + () + in + + (* exception_refs_raw: level 1 *) + let exception_refs_raw = + flatMap ~name:"exception_refs_raw" src + ~f:(fun _file (data : full_file_data) -> data.exception_refs) + () + in + + (* exception_decls: level 2 *) + let exception_decls = + 
flatMap ~name:"exception_decls" decls + ~f:(fun pos () -> + if String.length pos > 0 && pos.[0] = 'E' then [(pos, ())] else []) + () + in + + (* resolved_exception_refs: join at level 3 *) + let resolved_exception_refs = + join ~name:"resolved_exception_refs" exception_refs_raw exception_decls + ~key_of:(fun path _loc -> path) + ~f:(fun path loc decl_opt -> + match decl_opt with + | Some () -> [(path, loc)] + | None -> []) + () + in + + (* resolved_refs_from: level 4 *) + let resolved_refs_from = + flatMap ~name:"resolved_refs_from" resolved_exception_refs + ~f:(fun posTo posFrom -> [(posFrom, posTo)]) + () + in + + (* value_refs_from: union at level 5 *) + let value_refs_from = + union ~name:"value_refs_from" merged_value_refs resolved_refs_from () + in + + (* external_value_refs: join at level 6 *) + let external_value_refs = + join ~name:"external_value_refs" value_refs_from decls + ~key_of:(fun posFrom _posTo -> posFrom) + ~f:(fun _posFrom posTo decl_opt -> + match decl_opt with + | Some () -> [] + | None -> [(posTo, ())]) + ~merge:(fun () () -> ()) + () + in + + let deltas = track_deltas external_value_refs in + + emit + (Batch + [ + set "file1" + { + value_refs = [("A", "X")]; + exception_refs = [("E1", "Y")]; + full_decls = ["A"; "E1"]; + }; + ]); + + let _adds, removes = sum_deltas !deltas in + Printf.printf "removes=%d, len=%d\n" removes (length external_value_refs); + + assert (removes = 0); + Printf.printf "PASSED\n\n" + +(** Test: Separate sources - removals are expected here *) +let test_separate_sources () = + reset (); + Printf.printf "=== Test: separate sources (removals expected) ===\n"; + + let refs_src, emit_refs = source ~name:"refs_source" () in + let decls_src, emit_decls = source ~name:"decls_source" () in + + let external_refs = + join ~name:"external_refs" refs_src decls_src + ~key_of:(fun posFrom _posTo -> posFrom) + ~f:(fun _posFrom posTo decl_opt -> + match decl_opt with + | Some () -> [] + | None -> [(posTo, ())]) + ~merge:(fun () () -> ()) + () + in + + let deltas = track_deltas external_refs in + + (* Refs arrive first *) + emit_refs (Batch [set "A" "X"; set "B" "Y"; set "C" "Z"]); + + let adds1, _ = sum_deltas !deltas in + Printf.printf "After refs: adds=%d, len=%d\n" adds1 (length external_refs); + + (* Decls arrive second - causes removals *) + emit_decls (Batch [set "A" (); set "B" ()]); + + let adds2, removes2 = sum_deltas !deltas in + Printf.printf "After decls: adds=%d, removes=%d, len=%d\n" adds2 removes2 + (length external_refs); + + (* With separate sources, removals are expected and correct *) + assert (removes2 = 2); + (* X and Y removed *) + assert (length external_refs = 1); + (* Only Z remains *) + Printf.printf "PASSED\n\n" + +let run_all () = + Printf.printf "\n====== Glitch-Free Tests ======\n\n"; + test_same_source_anti_join (); + test_multi_level_union (); + test_real_pipeline_simulation (); + test_separate_sources () diff --git a/analysis/reactive/test/IntegrationTest.ml b/analysis/reactive/test/IntegrationTest.ml new file mode 100644 index 0000000000..428a1b2f8e --- /dev/null +++ b/analysis/reactive/test/IntegrationTest.ml @@ -0,0 +1,85 @@ +(** End-to-end integration tests *) + +open Reactive +open TestHelpers + +let test_file_collection () = + reset (); + Printf.printf "=== Test: File collection simulation ===\n"; + + (* Simulate file processing with regular sources *) + let files, emit_file = source ~name:"files" () in + + (* file_a: hello(2), world(1) *) + (* file_b: hello(1), foo(1) *) + + (* First flatMap: aggregate word counts 
across files with merge *) + let word_counts = + flatMap ~name:"word_counts" files + ~f:(fun _path counts -> StringMap.bindings counts) + (* Each file contributes its word counts *) + ~merge:( + ) (* Sum counts from multiple files *) + () + in + + (* Second flatMap: filter to words with count >= 2 *) + let frequent_words = + flatMap ~name:"frequent_words" word_counts + ~f:(fun word count -> if count >= 2 then [(word, count)] else []) + () + in + + (* Simulate processing files by emitting their word counts *) + let counts_a = + StringMap.empty |> StringMap.add "hello" 2 |> StringMap.add "world" 1 + in + let counts_b = + StringMap.empty |> StringMap.add "hello" 1 |> StringMap.add "foo" 1 + in + emit_file (Set ("file_a", counts_a)); + emit_file (Set ("file_b", counts_b)); + + Printf.printf "Word counts:\n"; + iter (fun word count -> Printf.printf " %s: %d\n" word count) word_counts; + + Printf.printf "Frequent words (count >= 2):\n"; + iter (fun word count -> Printf.printf " %s: %d\n" word count) frequent_words; + + (* Verify: hello=3 (2 from a + 1 from b), world=1, foo=1 *) + assert (get word_counts "hello" = Some 3); + assert (get word_counts "world" = Some 1); + assert (get word_counts "foo" = Some 1); + assert (length word_counts = 3); + + (* Verify frequent: only "hello" with count 3 *) + assert (length frequent_words = 1); + assert (get frequent_words "hello" = Some 3); + + (* Modify file_a: now hello(1), world(2) *) + Printf.printf "\nModifying file_a...\n"; + let counts_a' = + StringMap.empty |> StringMap.add "hello" 1 |> StringMap.add "world" 2 + in + emit_file (Set ("file_a", counts_a')); + + Printf.printf "Word counts after modification:\n"; + iter (fun word count -> Printf.printf " %s: %d\n" word count) word_counts; + + Printf.printf "Frequent words after modification:\n"; + iter (fun word count -> Printf.printf " %s: %d\n" word count) frequent_words; + + (* Verify: hello=2 (1 from a + 1 from b), world=2, foo=1 *) + assert (get word_counts "hello" = Some 2); + assert (get word_counts "world" = Some 2); + assert (get word_counts "foo" = Some 1); + + (* Verify frequent: hello=2, world=2 *) + assert (length frequent_words = 2); + assert (get frequent_words "hello" = Some 2); + assert (get frequent_words "world" = Some 2); + + Printf.printf "PASSED\n\n" + +let run_all () = + Printf.printf "\n====== Integration Tests ======\n\n"; + test_file_collection () diff --git a/analysis/reactive/test/JoinTest.ml b/analysis/reactive/test/JoinTest.ml new file mode 100644 index 0000000000..70c4eb6136 --- /dev/null +++ b/analysis/reactive/test/JoinTest.ml @@ -0,0 +1,122 @@ +(** Join combinator tests *) + +open Reactive + +let test_join () = + reset (); + Printf.printf "=== Test: join (reactive lookup/join) ===\n"; + + (* Left collection: exception refs (path -> loc_from) *) + let left, emit_left = source ~name:"left" () in + + (* Right collection: decl index (path -> decl_pos) *) + let right, emit_right = source ~name:"right" () in + + (* Join: for each (path, loc_from) in left, look up path in right *) + let joined = + join ~name:"joined" left right + ~key_of:(fun path _loc_from -> path) + ~f:(fun _path loc_from decl_pos_opt -> + match decl_pos_opt with + | Some decl_pos -> + (* Produce (decl_pos, loc_from) pairs *) + [(decl_pos, loc_from)] + | None -> []) + () + in + + (* Initially empty *) + assert (length joined = 0); + + (* Add declaration at path "A" with pos 100 *) + emit_right (Set ("A", 100)); + Printf.printf "After right Set(A, 100): joined=%d\n" (length joined); + assert (length joined = 
0); + + (* No left entries yet *) + + (* Add exception ref at path "A" from loc 1 *) + emit_left (Set ("A", 1)); + Printf.printf "After left Set(A, 1): joined=%d\n" (length joined); + assert (length joined = 1); + assert (get joined 100 = Some 1); + + (* decl_pos 100 -> loc_from 1 *) + + (* Add another exception ref at path "B" (no matching decl) *) + emit_left (Set ("B", 2)); + Printf.printf "After left Set(B, 2): joined=%d (B has no decl)\n" + (length joined); + assert (length joined = 1); + + (* Add declaration for path "B" *) + emit_right (Set ("B", 200)); + Printf.printf "After right Set(B, 200): joined=%d\n" (length joined); + assert (length joined = 2); + assert (get joined 200 = Some 2); + + (* Update right: change B's decl_pos *) + emit_right (Set ("B", 201)); + Printf.printf "After right Set(B, 201): joined=%d\n" (length joined); + assert (length joined = 2); + assert (get joined 200 = None); + (* Old key gone *) + assert (get joined 201 = Some 2); + + (* New key has the value *) + + (* Remove left entry A *) + emit_left (Remove "A"); + Printf.printf "After left Remove(A): joined=%d\n" (length joined); + assert (length joined = 1); + assert (get joined 100 = None); + + Printf.printf "PASSED\n\n" + +let test_join_with_merge () = + reset (); + Printf.printf "=== Test: join with merge ===\n"; + + (* Multiple left entries can map to same right key *) + let left, emit_left = source ~name:"left" () in + let right, emit_right = source ~name:"right" () in + + (* Join with merge: all entries produce to key 0 *) + let joined = + join ~name:"joined" left right + ~key_of:(fun _id path -> path) (* Look up by path *) + ~f:(fun _id _path value_opt -> + match value_opt with + | Some v -> [(0, v)] (* All contribute to key 0 *) + | None -> []) + ~merge:( + ) (* Sum values *) + () + in + + emit_right (Set ("X", 10)); + emit_left (Set (1, "X")); + emit_left (Set (2, "X")); + + Printf.printf "Two entries looking up X (value 10): sum=%d\n" + (get joined 0 |> Option.value ~default:0); + assert (get joined 0 = Some 20); + + (* 10 + 10 *) + emit_right (Set ("X", 5)); + Printf.printf "After right changes to 5: sum=%d\n" + (get joined 0 |> Option.value ~default:0); + assert (get joined 0 = Some 10); + + (* 5 + 5 *) + emit_left (Remove 1); + Printf.printf "After removing one left entry: sum=%d\n" + (get joined 0 |> Option.value ~default:0); + assert (get joined 0 = Some 5); + + (* Only one left *) + Printf.printf "PASSED\n\n" + +let run_all () = + Printf.printf "\n====== Join Tests ======\n\n"; + test_join (); + test_join_with_merge () diff --git a/analysis/reactive/test/ReactiveTest.ml b/analysis/reactive/test/ReactiveTest.ml new file mode 100644 index 0000000000..e94162f2b1 --- /dev/null +++ b/analysis/reactive/test/ReactiveTest.ml @@ -0,0 +1,13 @@ +(** Main test driver for Reactive tests *) + +let () = + Printf.printf "\n====== Reactive Collection Tests ======\n"; + FlatMapTest.run_all (); + JoinTest.run_all (); + UnionTest.run_all (); + FixpointBasicTest.run_all (); + FixpointIncrementalTest.run_all (); + BatchTest.run_all (); + IntegrationTest.run_all (); + GlitchFreeTest.run_all (); + Printf.printf "\nAll tests passed!\n" diff --git a/analysis/reactive/test/TestHelpers.ml b/analysis/reactive/test/TestHelpers.ml new file mode 100644 index 0000000000..54067172fe --- /dev/null +++ b/analysis/reactive/test/TestHelpers.ml @@ -0,0 +1,57 @@ +(** Shared test helpers for Reactive tests *) + +open Reactive + +(** {1 Compatibility helpers} *) + +(* V2's emit takes deltas, not tuples. 
These helpers adapt tuple-style calls. *) +let[@warning "-32"] emit_kv emit (k, v_opt) = + match v_opt with + | Some v -> emit (Set (k, v)) + | None -> emit (Remove k) + +(* subscribe takes collection first in V2, but we want handler first for compatibility *) +let subscribe handler t = t.subscribe handler + +(* emit_batch: emit a batch delta to a source *) +let emit_batch entries emit_fn = emit_fn (Batch entries) + +(* Helper to track added/removed across all delta types *) +let[@warning "-32"] track_changes () = + let added = ref [] in + let removed = ref [] in + let handler = function + | Set (k, _) -> added := k :: !added + | Remove k -> removed := k :: !removed + | Batch entries -> + List.iter + (fun (k, v_opt) -> + match v_opt with + | Some _ -> added := k :: !added + | None -> removed := k :: !removed) + entries + in + (added, removed, handler) + +(** {1 File helpers} *) + +let[@warning "-32"] read_lines path = + let ic = open_in path in + let lines = ref [] in + (try + while true do + lines := input_line ic :: !lines + done + with End_of_file -> ()); + close_in ic; + List.rev !lines + +let[@warning "-32"] write_lines path lines = + let oc = open_out path in + List.iter (fun line -> output_string oc (line ^ "\n")) lines; + close_out oc + +(** {1 Common set modules} *) + +module IntSet = Set.Make (Int) +module StringMap = Map.Make (String) diff --git a/analysis/reactive/test/UnionTest.ml b/analysis/reactive/test/UnionTest.ml new file mode 100644 index 0000000000..c532180389 --- /dev/null +++ b/analysis/reactive/test/UnionTest.ml @@ -0,0 +1,136 @@ +(** Union combinator tests *) + +open Reactive +open TestHelpers + +let test_union_basic () = + reset (); + Printf.printf "=== Test: union basic ===\n"; + + (* Left collection *) + let left, emit_left = source ~name:"left" () in + + (* Right collection *) + let right, emit_right = source ~name:"right" () in + + (* Create union without merge (right takes precedence) *) + let combined = union ~name:"combined" left right () in + + (* Initially empty *) + assert (length combined = 0); + + (* Add to left *) + emit_left (Set ("a", 1)); + Printf.printf "After left Set(a, 1): combined=%d\n" (length combined); + assert (length combined = 1); + assert (get combined "a" = Some 1); + + (* Add different key to right *) + emit_right (Set ("b", 2)); + Printf.printf "After right Set(b, 2): combined=%d\n" (length combined); + assert (length combined = 2); + assert (get combined "a" = Some 1); + assert (get combined "b" = Some 2); + + (* Add same key to right (should override left) *) + emit_right (Set ("a", 10)); + Printf.printf "After right Set(a, 10): combined a=%d\n" + (get combined "a" |> Option.value ~default:(-1)); + assert (length combined = 2); + assert (get combined "a" = Some 10); + + (* Right takes precedence *) + + (* Remove from right (left value should show through) *) + emit_right (Remove "a"); + Printf.printf "After right Remove(a): combined a=%d\n" + (get combined "a" |> Option.value ~default:(-1)); + assert (get combined "a" = Some 1); + + (* Left shows through *) + + (* Remove from left *) + emit_left (Remove "a"); + Printf.printf "After left Remove(a): combined=%d\n" (length combined); + assert (length combined = 1); + assert (get combined "a" = None); + assert (get combined "b" = Some 2); + + Printf.printf "PASSED\n\n" + +let test_union_with_merge () = + reset (); + Printf.printf "=== Test: union with merge ===\n"; + + (* Left collection *) + let left, emit_left = source ~name:"left" () in + + (* Right collection *) + let right, 
emit_right = source ~name:"right" () in + + (* Create union with set union as merge *) + let combined = union ~name:"combined" left right ~merge:IntSet.union () in + + (* Add to left: key "x" -> {1, 2} *) + emit_left (Set ("x", IntSet.of_list [1; 2])); + let v = get combined "x" |> Option.get in + Printf.printf "After left Set(x, {1,2}): {%s}\n" + (IntSet.elements v |> List.map string_of_int |> String.concat ", "); + assert (IntSet.equal v (IntSet.of_list [1; 2])); + + (* Add to right: key "x" -> {3, 4} (should merge) *) + emit_right (Set ("x", IntSet.of_list [3; 4])); + let v = get combined "x" |> Option.get in + Printf.printf "After right Set(x, {3,4}): {%s}\n" + (IntSet.elements v |> List.map string_of_int |> String.concat ", "); + assert (IntSet.equal v (IntSet.of_list [1; 2; 3; 4])); + + (* Update left: key "x" -> {1, 5} *) + emit_left (Set ("x", IntSet.of_list [1; 5])); + let v = get combined "x" |> Option.get in + Printf.printf "After left update to {1,5}: {%s}\n" + (IntSet.elements v |> List.map string_of_int |> String.concat ", "); + assert (IntSet.equal v (IntSet.of_list [1; 3; 4; 5])); + + (* Remove right *) + emit_right (Remove "x"); + let v = get combined "x" |> Option.get in + Printf.printf "After right Remove(x): {%s}\n" + (IntSet.elements v |> List.map string_of_int |> String.concat ", "); + assert (IntSet.equal v (IntSet.of_list [1; 5])); + + Printf.printf "PASSED\n\n" + +let test_union_existing_data () = + reset (); + Printf.printf "=== Test: union on collections with existing data ===\n"; + + (* Create collections with existing data *) + let left, emit_left = source ~name:"left" () in + emit_left (Set (1, "a")); + emit_left (Set (2, "b")); + + let right, emit_right = source ~name:"right" () in + emit_right (Set (2, "B")); + (* Overlaps with left *) + emit_right (Set (3, "c")); + + (* Create union after both have data *) + let combined = union ~name:"combined" left right () in + + Printf.printf "Union has %d entries (expected 3)\n" (length combined); + assert (length combined = 3); + assert (get combined 1 = Some "a"); + (* Only in left *) + assert (get combined 2 = Some "B"); + (* Right takes precedence *) + assert (get combined 3 = Some "c"); + + (* Only in right *) + Printf.printf "PASSED\n\n" + +let run_all () = + Printf.printf "\n====== Union Tests ======\n\n"; + test_union_basic (); + test_union_with_merge (); + test_union_existing_data () diff --git a/analysis/reactive/test/dune b/analysis/reactive/test/dune new file mode 100644 index 0000000000..cd8fe3ad9c --- /dev/null +++ b/analysis/reactive/test/dune @@ -0,0 +1,14 @@ +(executable + (name ReactiveTest) + (modules + ReactiveTest + TestHelpers + FlatMapTest + JoinTest + UnionTest + FixpointBasicTest + FixpointIncrementalTest + BatchTest + IntegrationTest + GlitchFreeTest) + (libraries reactive)) diff --git a/analysis/reanalyze/ARCHITECTURE.md b/analysis/reanalyze/ARCHITECTURE.md index 1d341ae52e..ae04765813 100644 --- a/analysis/reanalyze/ARCHITECTURE.md +++ b/analysis/reanalyze/ARCHITECTURE.md @@ -21,82 +21,9 @@ This design enables: ## Pipeline Diagram -``` -┌─────────────────────────────────────────────────────────────────────────────┐ -│ DCE ANALYSIS PIPELINE │ -└─────────────────────────────────────────────────────────────────────────────┘ - - ┌─────────────┐ - │ DceConfig.t │ (explicit configuration) - └──────┬──────┘ - │ - ╔════════════════════════════════╪════════════════════════════════════════╗ - ║ PHASE 1: MAP (per-file) │ ║ - ╠════════════════════════════════╪════════════════════════════════════════╣ - 
║ ▼ ║ - ║ ┌──────────┐ process_cmt_file ┌───────────────────────────────┐ ║ - ║ │ file1.cmt├──────────────────────►│ file_data { │ ║ - ║ └──────────┘ │ annotations: builder │ ║ - ║ ┌──────────┐ process_cmt_file │ decls: builder │ ║ - ║ │ file2.cmt├──────────────────────►│ refs: builder │ ║ - ║ └──────────┘ │ file_deps: builder │ ║ - ║ ┌──────────┐ process_cmt_file │ cross_file: builder │ ║ - ║ │ file3.cmt├──────────────────────►│ } │ ║ - ║ └──────────┘ └───────────────────────────────┘ ║ - ║ │ ║ - ║ Local mutable state OK │ file_data list ║ - ╚══════════════════════════════════════════════════╪══════════════════════╝ - │ - ╔══════════════════════════════════════════════════╪══════════════════════╗ - ║ PHASE 2: MERGE (combine builders) │ ║ - ╠══════════════════════════════════════════════════╪══════════════════════╣ - ║ ▼ ║ - ║ ┌─────────────────────────────────────────────────────────────────┐ ║ - ║ │ FileAnnotations.merge_all → annotations: FileAnnotations.t │ ║ - ║ │ Declarations.merge_all → decls: Declarations.t │ ║ - ║ │ References.merge_all → refs: References.t │ ║ - ║ │ FileDeps.merge_all → file_deps: FileDeps.t │ ║ - ║ │ CrossFileItems.merge_all → cross_file: CrossFileItems.t │ ║ - ║ │ │ ║ - ║ │ CrossFileItems.compute_optional_args_state │ ║ - ║ │ → optional_args_state: State.t │ ║ - ║ └─────────────────────────────────────────────────────────────────┘ ║ - ║ │ ║ - ║ Pure functions, immutable output │ merged data ║ - ╚══════════════════════════════════════════════════╪══════════════════════╝ - │ - ╔══════════════════════════════════════════════════╪══════════════════════╗ - ║ PHASE 3: SOLVE (pure deadness computation) │ ║ - ╠══════════════════════════════════════════════════╪══════════════════════╣ - ║ ▼ ║ - ║ ┌─────────────────────────────────────────────────────────────────┐ ║ - ║ │ Pass 1: DeadCommon.solveDead (core deadness) │ ║ - ║ │ ~annotations ~decls ~refs ~file_deps ~config │ ║ - ║ │ → AnalysisResult.t (dead/live status resolved) │ ║ - ║ │ │ ║ - ║ │ Pass 2: Optional args analysis (liveness-aware) │ ║ - ║ │ CrossFileItems.compute_optional_args_state ~is_live │ ║ - ║ │ DeadOptionalArgs.check (only for live decls) │ ║ - ║ │ → AnalysisResult.t { issues: Issue.t list } │ ║ - ║ └─────────────────────────────────────────────────────────────────┘ ║ - ║ │ ║ - ║ Pure functions: immutable in → immutable out │ issues ║ - ╚══════════════════════════════════════════════════╪══════════════════════╝ - │ - ╔══════════════════════════════════════════════════╪══════════════════════╗ - ║ PHASE 4: REPORT (side effects at the edge) │ ║ - ╠══════════════════════════════════════════════════╪══════════════════════╣ - ║ ▼ ║ - ║ ┌─────────────────────────────────────────────────────────────────┐ ║ - ║ │ AnalysisResult.get_issues │ ║ - ║ │ |> List.iter (fun issue -> Log_.warning ~loc issue.description) │ ║ - ║ │ │ ║ - ║ │ (Optional: EmitJson for JSON output) │ ║ - ║ └─────────────────────────────────────────────────────────────────┘ ║ - ║ ║ - ║ Side effects only here: logging, JSON output ║ - ╚════════════════════════════════════════════════════════════════════════╝ -``` +> **Source**: [`diagrams/batch-pipeline.mmd`](diagrams/batch-pipeline.mmd) + +![Batch Pipeline](diagrams/batch-pipeline.svg) --- @@ -107,7 +34,7 @@ This design enables: | `DceFileProcessing.file_data` | Per-file collected data | Builders (mutable during AST walk) | | `FileAnnotations.t` | Source annotations (`@dead`, `@live`) | Immutable after merge | | `Declarations.t` | All exported declarations (pos → Decl.t) | Immutable after 
merge | -| `References.t` | Value/type references (pos → PosSet.t) | Immutable after merge | +| `References.t` | Value/type references (source → targets) | Immutable after merge | | `FileDeps.t` | Cross-file dependencies (file → FileSet.t) | Immutable after merge | | `OptionalArgsState.t` | Computed optional arg state per-decl | Immutable | | `AnalysisResult.t` | Solver output with Issue.t list | Immutable | @@ -158,14 +85,18 @@ let file_deps = FileDeps.merge_all (file_data_list |> List.map (fun fd -> fd.fil **Output**: `AnalysisResult.t` containing `Issue.t list` -**Algorithm** (two-pass for liveness-aware optional args): +**Algorithm** (forward fixpoint + liveness-aware optional args): -**Pass 1: Core deadness resolution** -1. Build file dependency order (roots to leaves) -2. Sort declarations by dependency order -3. For each declaration, resolve references recursively -4. Determine dead/live status based on reference count -5. Collect issues for dead declarations +**Core liveness computation** (`Liveness.compute_forward`): +1. Identify roots: declarations with `@live`/`@genType` annotations or referenced from outside any declaration +2. Build index mapping each declaration to its outgoing references (refs_from direction) +3. Run forward fixpoint: propagate liveness from roots through references +4. Return set of all live positions + +**Pass 1: Deadness resolution** +1. Compute liveness via forward propagation +2. For each declaration, check if in live set +3. Mark dead declarations, collect issues **Pass 2: Liveness-aware optional args analysis** 1. Use `Decl.isLive` to build an `is_live` predicate from Pass 1 results @@ -208,6 +139,172 @@ The key insight: **immutable data structures enable safe incremental updates** - --- +## Reactive Pipelines + +The reactive layer (`analysis/reactive/`) provides delta-based incremental updates. Instead of re-running entire phases, changes propagate automatically through derived collections. + +### Core Reactive Primitives + +| Primitive | Description | +|-----------|-------------| +| `Reactive.t ('k, 'v)` | Universal reactive collection interface | +| `subscribe` | Register for delta notifications | +| `iter` | Iterate current entries | +| `get` | Lookup by key | +| `delta` | Change notification: `Set (k, v)`, `Remove k`, or `Batch [(k, v option); ...]` | +| `source` | Create a mutable source collection with emit function | +| `flatMap` | Transform collection, optionally merge same-key values | +| `join` | Hash join two collections (left join behavior) | +| `union` | Combine two collections, optionally merge same-key values | +| `fixpoint` | Transitive closure: `init + edges → reachable` | +| `ReactiveFileCollection` | File-backed collection with change detection | + +### Glitch-Free Semantics via Topological Scheduling + +The reactive system implements **glitch-free propagation** using an accumulate-then-propagate scheduler. This ensures derived collections always see consistent parent states, similar to SKStore's approach. + +**How it works:** +1. Each node has a `level` (topological order): + - Source collections have `level = 0` + - Derived collections have `level = max(parent levels) + 1` +2. Each combinator **accumulates** incoming deltas in pending buffers +3. The scheduler visits dirty nodes in level order and calls `process()` +4. 
Each node processes **once per wave** with complete input from all parents + +**Example ordering:** +``` +file_collection (L0) → file_data (L1) → decls (L2) → live (L14) → dead_decls (L15) +``` + +When a batch of file changes arrives: +1. Deltas accumulate in pending buffers (no immediate processing) +2. Scheduler processes level 0, then level 1, etc. +3. A join processes only after **both** parents have updated + +The `Reactive.Registry` and `Reactive.Scheduler` modules provide: +- Named nodes with stats tracking (use `-timing` flag to see stats) +- `to_mermaid()` - Generate pipeline diagram (use `-mermaid` flag) +- `print_stats()` - Show per-node timing and delta counts + +### Fully Reactive Analysis Pipeline + +The reactive pipeline computes issues directly from source files with **zero recomputation on cache hits**: + +``` +Files → file_data → decls, annotations, refs → live (fixpoint) → dead/live_decls → issues → REPORT + ↓ ↓ ↓ ↓ ↓ ↓ + ReactiveFile ReactiveMerge ReactiveLiveness ReactiveSolver iter + Collection (flatMap) (fixpoint) (multiple joins) (only) +``` + +**Key property**: When no files change, no computation happens. All reactive collections are stable. Only the final `collect_issues` call iterates pre-computed collections (O(issues)). + +### Pipeline Stages + +| Stage | Input | Output | Combinator | +|-------|-------|--------|------------| +| **File Processing** | `.cmt` files | `file_data` | `ReactiveFileCollection` | +| **Merge** | `file_data` | `decls`, `annotations`, `refs` | `flatMap` | +| **Liveness** | `refs`, `annotations` | `live` (positions) | `fixpoint` | +| **Dead/Live Partition** | `decls`, `live` | `dead_decls`, `live_decls` | `join` (partition by liveness) | +| **Dead Modules** | `dead_decls`, `live_decls` | `dead_modules` | `flatMap` + `join` (anti-join) | +| **Per-File Grouping** | `dead_decls`, `refs` | `dead_decls_by_file`, `refs_by_file` | `flatMap` with merge | +| **Per-File Issues** | `dead_decls_by_file`, `annotations` | `issues_by_file` | `flatMap` (sort + filter + generate) | +| **Incorrect @dead** | `live_decls`, `annotations` | `incorrect_dead_decls` | `join` (live with Dead annotation) | +| **Module Issues** | `dead_modules`, `issues_by_file` | `dead_module_issues` | `flatMap` + `join` | +| **Report** | all issue collections | stdout | `iter` (ONLY iteration) | + +### ReactiveSolver Collections + +| Collection | Type | Description | +|------------|------|-------------| +| `dead_decls` | `(pos, Decl.t)` | Declarations NOT in live set | +| `live_decls` | `(pos, Decl.t)` | Declarations IN live set | +| `dead_modules` | `(Name.t, Location.t)` | Modules with only dead declarations (anti-join) | +| `dead_decls_by_file` | `(file, Decl.t list)` | Dead decls grouped by file | +| `value_refs_from_by_file` | `(file, (pos, PosSet.t) list)` | Refs grouped by source file (for hasRefBelow) | +| `issues_by_file` | `(file, Issue.t list * Name.t list)` | Per-file issues + reported modules | +| `incorrect_dead_decls` | `(pos, Decl.t)` | Live decls with @dead annotation | +| `dead_module_issues` | `(Name.t, Issue.t)` | Module issues (join of dead_modules + modules_with_reported) | + +**Note**: Optional args analysis (unused/redundant arguments) is not yet in the reactive pipeline - it still uses the non-reactive path (~8-14ms). TODO: Add `live_decls + cross_file_items → optional_args_issues` to the reactive pipeline. 
+ +### Reactive Pipeline Diagram + +> **Source**: [`diagrams/reactive-pipeline.mmd`](diagrams/reactive-pipeline.mmd) + +![Reactive Pipeline](diagrams/reactive-pipeline.svg) + +This is a high-level view (~25 nodes). See also the [full detailed diagram source](diagrams/reactive-pipeline-full.mmd) with all 44 nodes (auto-generated via `-mermaid` flag). + +Key stages: + +1. **File Layer**: `file_collection` → `file_data` → extracted collections +2. **TypeDeps**: `decl_by_path` → interface/implementation refs → `all_type_refs` +3. **ExceptionRefs**: `cross_file` → `resolved_refs` → `resolved_from` +4. **DeclRefs**: Combines value/type refs → `combined` edges +5. **Liveness**: `annotated_roots` + `externally_referenced` → `all_roots` + `edges` → `live` (fixpoint) +6. **Solver**: `decls` + `live` → `dead_decls`/`live_decls` → per-file issues → module issues + +Use `-mermaid` flag to generate the current pipeline diagram from code. + +### Delta Propagation + +When a file changes: + +1. `ReactiveFileCollection` detects change, emits delta for `file_data` +2. `ReactiveMerge` receives delta, updates `decls`, `refs`, `annotations` +3. `ReactiveLiveness` receives delta, updates `live` set via incremental fixpoint +4. `ReactiveSolver` receives delta, updates `dead_decls` and `issues` via reactive joins +5. **Only affected entries are recomputed** - untouched entries remain stable + +When no files change: +- **Zero computation** - all reactive collections are stable +- Only `collect_issues` iterates (O(issues)) - this is the ONLY iteration in the entire pipeline +- Reporting is linear in the number of issues + +### Performance Characteristics + +| Scenario | Solving | Reporting | Total | +|----------|---------|-----------|-------| +| Cold start (4900 files) | ~2ms | ~3ms | ~7.7s | +| Cache hit (0 files changed) | ~1-5ms | ~3-8ms | ~30ms | +| Single file change | O(affected_decls) | O(issues) | minimal | + +**Key insight**: On cache hit, `Solving` time is just iterating the reactive `issues` collection. +No joins are recomputed, no fixpoints are re-run - the reactive collections are stable. + +### Reactive Modules + +| Module | Responsibility | +|--------|---------------| +| `Reactive` | Core primitives: `source`, `flatMap`, `join`, `union`, `fixpoint`, `Scheduler`, `Registry` | +| `ReactiveFileCollection` | File-backed collection with change detection | +| `ReactiveAnalysis` | CMT processing with file caching | +| `ReactiveMerge` | Derives decls, annotations, refs from file_data | +| `ReactiveTypeDeps` | Type-label dependency resolution | +| `ReactiveExceptionRefs` | Exception ref resolution via join | +| `ReactiveDeclRefs` | Maps declarations to their outgoing references | +| `ReactiveLiveness` | Computes live positions via reactive fixpoint | +| `ReactiveSolver` | Computes dead_decls and issues via reactive joins | + +### Stats Tracking + +Use `-timing` flag to see per-node statistics: + +| Stat | Description | +|------|-------------| +| `d_recv` | Deltas received (Set/Remove/Batch messages) | +| `e_recv` | Entries received (after batch expansion) | +| `+in` / `-in` | Adds/removes received from upstream | +| `d_emit` | Deltas emitted downstream | +| `e_emit` | Entries in emitted deltas | +| `+out` / `-out` | Adds/removes emitted (non-zero `-out` indicates churn) | +| `runs` | Times the node's `process()` was called | +| `time_ms` | Cumulative processing time | + +--- + ## Testing **Order-independence test**: Run with `-test-shuffle` flag to randomize file processing order. 
The test (`make test-reanalyze-order-independence`) verifies that shuffled runs produce identical output. @@ -226,13 +323,15 @@ The key insight: **immutable data structures enable safe incremental updates** - | `Reanalyze` | Entry point, orchestrates pipeline | | `DceFileProcessing` | Phase 1: Per-file AST processing | | `DceConfig` | Configuration (CLI flags + run config) | -| `DeadCommon` | Phase 3: Solver (`solveDead`) | +| `DeadCommon` | Phase 3: Solver (`solveDead`, `solveDeadReactive`) | +| `Liveness` | Forward fixpoint liveness computation | | `Declarations` | Declaration storage (builder/immutable) | -| `References` | Reference tracking (builder/immutable) | +| `References` | Reference tracking (source → targets) | | `FileAnnotations` | Source annotation tracking | | `FileDeps` | Cross-file dependency graph | | `CrossFileItems` | Cross-file optional args and exceptions | | `AnalysisResult` | Immutable solver output | | `Issue` | Issue type definitions | | `Log_` | Phase 4: Logging output | +| `ReactiveSolver` | Reactive dead_decls → issues computation | diff --git a/analysis/reanalyze/README.md b/analysis/reanalyze/README.md new file mode 100644 index 0000000000..7255664a54 --- /dev/null +++ b/analysis/reanalyze/README.md @@ -0,0 +1,169 @@ +# Reanalyze + +Dead code analysis and other experimental analyses for ReScript. + +## Analyses + +- **Dead Code Elimination (DCE)** - Detect unused values, types, and modules +- **Exception Analysis** - Track potential exceptions through call chains +- **Termination Analysis** - Experimental analysis for detecting non-terminating functions + +## Usage + +```bash +# Run DCE analysis on current project (reads rescript.json) +rescript-editor-analysis reanalyze -config + +# Run DCE analysis on specific CMT directory +rescript-editor-analysis reanalyze -dce-cmt path/to/lib/bs + +# Run all analyses +rescript-editor-analysis reanalyze -all +``` + +## Performance Options + +### Parallel Processing + +Use multiple CPU cores for faster analysis: + +```bash +# Use 4 parallel domains +reanalyze -config -parallel 4 + +# Auto-detect number of cores +reanalyze -config -parallel -1 +``` + +### CMT Cache (Experimental) + +Use memory-mapped cache for CMT file reading: + +```bash +reanalyze -config -cmt-cache +``` + +### Reactive Mode (Experimental) + +Cache processed file data and skip unchanged files on subsequent runs: + +```bash +reanalyze -config -reactive +``` + +This provides significant speedup for repeated analysis (e.g., in a watch mode or service): + +| Mode | CMT Processing | Total | Speedup | +|------|----------------|-------|---------| +| Standard | 0.78s | 1.01s | 1x | +| Reactive (warm) | 0.01s | 0.20s | 5x | + +### Benchmarking + +Run analysis multiple times to measure cache effectiveness: + +```bash +reanalyze -config -reactive -timing -runs 3 +``` + +## CLI Flags + +| Flag | Description | +|------|-------------| +| `-config` | Read analysis mode from rescript.json | +| `-dce` | Run dead code analysis | +| `-exception` | Run exception analysis | +| `-termination` | Run termination analysis | +| `-all` | Run all analyses | +| `-parallel n` | Use n parallel domains (0=sequential, -1=auto) | +| `-cmt-cache` | Use mmap cache for CMT files | +| `-reactive` | Cache processed file_data, skip unchanged files | +| `-runs n` | Run analysis n times (for benchmarking) | +| `-timing` | Report timing of analysis phases | +| `-debug` | Print debug information | +| `-json` | Output in JSON format | +| `-ci` | Internal flag for CI mode | + +## Architecture + +See 
[ARCHITECTURE.md](ARCHITECTURE.md) for details on the analysis pipeline. + +The DCE analysis is structured as a pure pipeline: + +1. **MAP** - Process each `.cmt` file independently → per-file data +2. **MERGE** - Combine all per-file data → project-wide view +3. **SOLVE** - Compute dead/live status → issues +4. **REPORT** - Output issues + +This design enables order-independence, parallelization, and incremental updates. + +## Reactive Analysis + +The reactive mode (`-reactive`) uses skip-lite's Marshal_cache to efficiently detect file changes: + +1. **First run**: All files are processed and results cached +2. **Subsequent runs**: Only changed files are re-processed +3. **Unchanged files**: Return cached `file_data` immediately (no I/O or unmarshalling) + +This is the foundation for a persistent analysis service that can respond to file changes in milliseconds. + +## Development + +### Testing + +```bash +# Run reanalyze tests +make test-reanalyze + +# Run with shuffled file order (order-independence test) +make test-reanalyze-order-independence + +# Run parallel mode test +make test-reanalyze-parallel +``` + +### Benchmarking + +The benchmark project generates ~5000 files to measure analysis performance: + +```bash +cd tests/analysis_tests/tests-reanalyze/deadcode-benchmark + +# Generate files, build, and run sequential vs parallel benchmark +make benchmark + +# Compare CMT cache effectiveness (cold vs warm) +make time-cache + +# Benchmark reactive mode (shows speedup on repeated runs) +make time-reactive +``` + +#### Reactive Benchmark + +The `make time-reactive` target runs: + +1. **Standard mode** (baseline) - Full analysis every time +2. **Reactive mode** with 3 runs - First run is cold (processes all files), subsequent runs are warm (skip unchanged files) + +Example output: + +``` +=== Reactive mode benchmark === + +Standard (baseline): + CMT processing: 0.78s + Total: 1.01s + +Reactive mode (3 runs): + === Run 1/3 === + CMT processing: 0.78s + Total: 1.02s + === Run 2/3 === + CMT processing: 0.01s <-- 74x faster + Total: 0.20s <-- 5x faster + === Run 3/3 === + CMT processing: 0.01s + Total: 0.20s +``` + diff --git a/analysis/reanalyze/diagrams/batch-pipeline.mmd b/analysis/reanalyze/diagrams/batch-pipeline.mmd new file mode 100644 index 0000000000..cc2c1bde94 --- /dev/null +++ b/analysis/reanalyze/diagrams/batch-pipeline.mmd @@ -0,0 +1,53 @@ +%%{init: {'theme': 'base', 'themeVariables': { 'primaryColor': '#e8f4fd', 'primaryTextColor': '#1a1a1a', 'primaryBorderColor': '#4a90d9', 'lineColor': '#4a90d9'}}}%% +flowchart TB + subgraph Phase1["PHASE 1: MAP (per-file)"] + CMT1["file1.cmt"] + CMT2["file2.cmt"] + CMT3["file3.cmt"] + PROC["process_cmt_file"] + FD1["file_data₁"] + FD2["file_data₂"] + FD3["file_data₃"] + + CMT1 --> PROC + CMT2 --> PROC + CMT3 --> PROC + PROC --> FD1 + PROC --> FD2 + PROC --> FD3 + end + + subgraph Phase2["PHASE 2: MERGE"] + MERGE["merge_all"] + MERGED["merged {
annotations,
decls,
refs,
file_deps
}"] + + FD1 --> MERGE + FD2 --> MERGE + FD3 --> MERGE + MERGE --> MERGED + end + + subgraph Phase3["PHASE 3: SOLVE"] + SOLVE["solveDead"] + RESULT["AnalysisResult {
issues: Issue.t list
}"] + + MERGED --> SOLVE + SOLVE --> RESULT + end + + subgraph Phase4["PHASE 4: REPORT"] + REPORT["Log_.warning"] + + RESULT --> REPORT + end + + classDef phase1 fill:#e8f4fd,stroke:#4a90d9 + classDef phase2 fill:#f0f7e6,stroke:#6b8e23 + classDef phase3 fill:#fff5e6,stroke:#d4a574 + classDef phase4 fill:#ffe6e6,stroke:#cc6666 + + class CMT1,CMT2,CMT3,PROC,FD1,FD2,FD3 phase1 + class MERGE,MERGED phase2 + class SOLVE,RESULT phase3 + class REPORT phase4 + diff --git a/analysis/reanalyze/diagrams/batch-pipeline.svg b/analysis/reanalyze/diagrams/batch-pipeline.svg new file mode 100644 index 0000000000..5877ce5c5a --- /dev/null +++ b/analysis/reanalyze/diagrams/batch-pipeline.svg @@ -0,0 +1 @@ +

[batch-pipeline.svg: rendered SVG omitted — text labels duplicate batch-pipeline.mmd above]
\ No newline at end of file diff --git a/analysis/reanalyze/diagrams/delta-propagation.mmd b/analysis/reanalyze/diagrams/delta-propagation.mmd new file mode 100644 index 0000000000..94f6d39c17 --- /dev/null +++ b/analysis/reanalyze/diagrams/delta-propagation.mmd @@ -0,0 +1,26 @@ +%%{init: {'theme': 'base', 'themeVariables': { 'primaryColor': '#e8f4fd', 'primaryTextColor': '#1a1a1a', 'primaryBorderColor': '#4a90d9', 'lineColor': '#4a90d9'}}}%% +sequenceDiagram + participant FS as File System + participant RFC as ReactiveFileCollection + participant FD as file_data + participant DECLS as decls + participant DBP as decl_by_path + participant REFS as refs + participant SOLVER as Solver + + Note over FS,SOLVER: File.cmt changes on disk + + FS->>RFC: mtime/size changed + RFC->>RFC: read_cmt + process + RFC->>FD: Set("File.res", new_file_data) + + FD->>DECLS: Remove(old_pos₁), Remove(old_pos₂), ... + FD->>DECLS: Set(new_pos₁, decl₁), Set(new_pos₂, decl₂), ... + + DECLS->>DBP: Update affected paths only + DBP->>DBP: Recalculate merged lists + + DBP->>REFS: Set(pos, updated_refs) + + Note over SOLVER: Solver sees updated refs immediately + diff --git a/analysis/reanalyze/diagrams/delta-propagation.svg b/analysis/reanalyze/diagrams/delta-propagation.svg new file mode 100644 index 0000000000..06bd47c050 --- /dev/null +++ b/analysis/reanalyze/diagrams/delta-propagation.svg @@ -0,0 +1 @@ +Solverrefsdecl_by_pathdeclsfile_dataReactiveFileCollectionFile SystemSolverrefsdecl_by_pathdeclsfile_dataReactiveFileCollectionFile SystemFile.cmt changes on diskSolver sees updated refs immediatelymtime/size changedread_cmt + processSet("File.res", new_file_data)Remove(old_pos₁), Remove(old_pos₂), ...Set(new_pos₁, decl₁), Set(new_pos₂, decl₂), ...Update affected paths onlyRecalculate merged listsSet(pos, updated_refs) \ No newline at end of file diff --git a/analysis/reanalyze/diagrams/reactive-pipeline-full.mmd b/analysis/reanalyze/diagrams/reactive-pipeline-full.mmd new file mode 100644 index 0000000000..cf2fc700d4 --- /dev/null +++ b/analysis/reanalyze/diagrams/reactive-pipeline-full.mmd @@ -0,0 +1,186 @@ +graph TD + annotations[annotations] + exc_refs.exception_decls[exc_refs.exception_decls] + type_deps.same_path_refs[type_deps.same_path_refs] + file_collection[file_collection] + decl_refs.value_decl_refs[decl_refs.value_decl_refs] + type_deps.combined_refs_to[type_deps.combined_refs_to] + type_deps.all_type_refs_from[type_deps.all_type_refs_from] + type_deps.impl_needing_path2[type_deps.impl_needing_path2] + exc_refs.resolved_refs_from[exc_refs.resolved_refs_from] + exc_refs.resolved_refs[exc_refs.resolved_refs] + type_deps.impl_to_intf_refs_path2[type_deps.impl_to_intf_refs_path2] + file_deps_map[file_deps_map] + decl_refs.with_value_refs[decl_refs.with_value_refs] + type_deps.u1[type_deps.u1] + cross_file_items[cross_file_items] + decl_refs.decls_by_file[decl_refs.decls_by_file] + type_deps.impl_to_intf_refs[type_deps.impl_to_intf_refs] + solver.issues_by_file[solver.issues_by_file] + liveness.annotated_roots[liveness.annotated_roots] + solver.incorrect_dead_decls[solver.incorrect_dead_decls] + type_deps.intf_to_impl_refs[type_deps.intf_to_impl_refs] + type_deps.decl_by_path[type_deps.decl_by_path] + type_deps.u2[type_deps.u2] + solver.live_decls[solver.live_decls] + type_deps.impl_decls[type_deps.impl_decls] + liveness.all_roots[liveness.all_roots] + solver.dead_modules[solver.dead_modules] + liveness.external_type_refs[liveness.external_type_refs] + decl_refs.combined[decl_refs.combined] + 
type_refs_from[type_refs_from] + liveness.type_refs_from[liveness.type_refs_from] + solver.dead_decls_by_file[solver.dead_decls_by_file] + liveness.external_value_refs[liveness.external_value_refs] + liveness.value_refs_from[liveness.value_refs_from] + value_refs_from[value_refs_from] + solver.modules_with_dead[solver.modules_with_dead] + solver.dead_decls[solver.dead_decls] + exception_refs_collection[exception_refs_collection] + type_deps.intf_decls[type_deps.intf_decls] + file_data_collection[file_data_collection] + solver.dead_module_issues[solver.dead_module_issues] + decl_refs.with_type_refs[decl_refs.with_type_refs] + solver.modules_with_live[solver.modules_with_live] + decl_refs.type_decl_refs[decl_refs.type_decl_refs] + files[files] + solver.modules_with_reported[solver.modules_with_reported] + liveness.externally_referenced[liveness.externally_referenced] + liveness.edges[liveness.edges] + liveness.live[liveness.live] + decls[decls] + type_deps.intf_to_impl_refs_join{join} + liveness.external_value_refs_join{join} + type_deps.impl_to_intf_refs_path2_join{join} + solver.incorrect_dead_decls_join{join} + liveness.value_refs_from_union{union} + exc_refs.resolved_refs_join{join} + type_deps.combined_refs_to_union{union} + liveness.externally_referenced_union{union} + solver.dead_modules_join{join} + liveness.all_roots_union{union} + solver.dead_module_issues_join{join} + solver.dead_decls_join{join} + liveness.annotated_roots_join{join} + decl_refs.value_decl_refs_join{join} + type_deps.impl_to_intf_refs_join{join} + solver.live_decls_join{join} + decl_refs.with_value_refs_join{join} + liveness.type_refs_from_union{union} + type_deps.impl_needing_path2_join{join} + liveness.external_type_refs_join{join} + liveness.live_fp{fixpoint} + decl_refs.type_decl_refs_join{join} + type_deps.u2_union{union} + decl_refs.combined_join{join} + type_deps.u1_union{union} + decl_refs.with_type_refs_join{join} + annotations --> solver.incorrect_dead_decls_join + annotations --> liveness.annotated_roots_join + exc_refs.exception_decls --> exc_refs.resolved_refs_join + type_deps.same_path_refs --> type_deps.u1_union + file_collection -->|flatMap| file_data_collection + decl_refs.value_decl_refs --> decl_refs.with_value_refs_join + type_deps.combined_refs_to -->|flatMap| type_deps.all_type_refs_from + type_deps.all_type_refs_from --> liveness.type_refs_from_union + type_deps.impl_needing_path2 --> type_deps.impl_to_intf_refs_path2_join + exc_refs.resolved_refs_from --> liveness.value_refs_from_union + exc_refs.resolved_refs -->|flatMap| exc_refs.resolved_refs_from + type_deps.impl_to_intf_refs_path2 --> type_deps.u2_union + decl_refs.with_value_refs --> decl_refs.combined_join + type_deps.u1 --> type_deps.u2_union + cross_file_items -->|flatMap| exception_refs_collection + decl_refs.decls_by_file --> decl_refs.type_decl_refs_join + decl_refs.decls_by_file --> decl_refs.value_decl_refs_join + type_deps.impl_to_intf_refs --> type_deps.u1_union + solver.issues_by_file -->|flatMap| solver.modules_with_reported + liveness.annotated_roots --> liveness.all_roots_union + type_deps.intf_to_impl_refs --> type_deps.combined_refs_to_union + type_deps.decl_by_path --> type_deps.intf_to_impl_refs_join + type_deps.decl_by_path --> type_deps.impl_to_intf_refs_path2_join + type_deps.decl_by_path --> type_deps.impl_needing_path2_join + type_deps.decl_by_path --> type_deps.impl_to_intf_refs_join + type_deps.decl_by_path -->|flatMap| type_deps.same_path_refs + type_deps.u2 --> type_deps.combined_refs_to_union + 
solver.live_decls --> solver.incorrect_dead_decls_join + solver.live_decls -->|flatMap| solver.modules_with_live + type_deps.impl_decls --> type_deps.impl_needing_path2_join + type_deps.impl_decls --> type_deps.impl_to_intf_refs_join + liveness.all_roots --> liveness.live_fp + solver.dead_modules --> solver.dead_module_issues_join + liveness.external_type_refs --> liveness.externally_referenced_union + decl_refs.combined -->|flatMap| liveness.edges + type_refs_from --> liveness.type_refs_from_union + liveness.type_refs_from --> liveness.external_type_refs_join + liveness.type_refs_from --> decl_refs.type_decl_refs_join + solver.dead_decls_by_file -->|flatMap| solver.issues_by_file + liveness.external_value_refs --> liveness.externally_referenced_union + liveness.value_refs_from --> liveness.external_value_refs_join + liveness.value_refs_from --> decl_refs.value_decl_refs_join + value_refs_from --> liveness.value_refs_from_union + solver.modules_with_dead --> solver.dead_modules_join + solver.dead_decls -->|flatMap| solver.dead_decls_by_file + solver.dead_decls -->|flatMap| solver.modules_with_dead + exception_refs_collection --> exc_refs.resolved_refs_join + type_deps.intf_decls --> type_deps.intf_to_impl_refs_join + file_data_collection -->|flatMap| files + file_data_collection -->|flatMap| file_deps_map + file_data_collection -->|flatMap| cross_file_items + file_data_collection -->|flatMap| type_refs_from + file_data_collection -->|flatMap| value_refs_from + file_data_collection -->|flatMap| annotations + file_data_collection -->|flatMap| decls + decl_refs.with_type_refs --> decl_refs.combined_join + solver.modules_with_live --> solver.dead_modules_join + decl_refs.type_decl_refs --> decl_refs.with_type_refs_join + solver.modules_with_reported --> solver.dead_module_issues_join + liveness.externally_referenced --> liveness.all_roots_union + liveness.edges --> liveness.live_fp + liveness.live --> solver.live_decls_join + liveness.live --> solver.dead_decls_join + decls --> solver.live_decls_join + decls --> solver.dead_decls_join + decls --> liveness.annotated_roots_join + decls --> liveness.external_type_refs_join + decls --> liveness.external_value_refs_join + decls --> decl_refs.with_type_refs_join + decls --> decl_refs.with_value_refs_join + decls -->|flatMap| decl_refs.decls_by_file + decls -->|flatMap| exc_refs.exception_decls + decls -->|flatMap| type_deps.intf_decls + decls -->|flatMap| type_deps.impl_decls + decls -->|flatMap| type_deps.decl_by_path + type_deps.intf_to_impl_refs_join --> type_deps.intf_to_impl_refs + liveness.external_value_refs_join --> liveness.external_value_refs + type_deps.impl_to_intf_refs_path2_join --> type_deps.impl_to_intf_refs_path2 + solver.incorrect_dead_decls_join --> solver.incorrect_dead_decls + liveness.value_refs_from_union --> liveness.value_refs_from + exc_refs.resolved_refs_join --> exc_refs.resolved_refs + type_deps.combined_refs_to_union --> type_deps.combined_refs_to + liveness.externally_referenced_union --> liveness.externally_referenced + solver.dead_modules_join --> solver.dead_modules + liveness.all_roots_union --> liveness.all_roots + solver.dead_module_issues_join --> solver.dead_module_issues + solver.dead_decls_join --> solver.dead_decls + liveness.annotated_roots_join --> liveness.annotated_roots + decl_refs.value_decl_refs_join --> decl_refs.value_decl_refs + type_deps.impl_to_intf_refs_join --> type_deps.impl_to_intf_refs + solver.live_decls_join --> solver.live_decls + decl_refs.with_value_refs_join --> 
decl_refs.with_value_refs + liveness.type_refs_from_union --> liveness.type_refs_from + type_deps.impl_needing_path2_join --> type_deps.impl_needing_path2 + liveness.external_type_refs_join --> liveness.external_type_refs + liveness.live_fp --> liveness.live + decl_refs.type_decl_refs_join --> decl_refs.type_decl_refs + type_deps.u2_union --> type_deps.u2 + decl_refs.combined_join --> decl_refs.combined + type_deps.u1_union --> type_deps.u1 + decl_refs.with_type_refs_join --> decl_refs.with_type_refs + + classDef joinClass fill:#e6f3ff,stroke:#0066cc + classDef unionClass fill:#fff0e6,stroke:#cc6600 + classDef fixpointClass fill:#e6ffe6,stroke:#006600 + class decl_refs.with_type_refs_join,decl_refs.combined_join,decl_refs.type_decl_refs_join,liveness.external_type_refs_join,type_deps.impl_needing_path2_join,decl_refs.with_value_refs_join,solver.live_decls_join,type_deps.impl_to_intf_refs_join,decl_refs.value_decl_refs_join,liveness.annotated_roots_join,solver.dead_decls_join,solver.dead_module_issues_join,solver.dead_modules_join,exc_refs.resolved_refs_join,solver.incorrect_dead_decls_join,type_deps.impl_to_intf_refs_path2_join,liveness.external_value_refs_join,type_deps.intf_to_impl_refs_join joinClass + class type_deps.u1_union,type_deps.u2_union,liveness.type_refs_from_union,liveness.all_roots_union,liveness.externally_referenced_union,type_deps.combined_refs_to_union,liveness.value_refs_from_union unionClass + class liveness.live_fp fixpointClass + diff --git a/analysis/reanalyze/diagrams/reactive-pipeline.mmd b/analysis/reanalyze/diagrams/reactive-pipeline.mmd new file mode 100644 index 0000000000..d3ba094a50 --- /dev/null +++ b/analysis/reanalyze/diagrams/reactive-pipeline.mmd @@ -0,0 +1,119 @@ +%%{init: {'theme': 'base', 'themeVariables': { 'primaryColor': '#e8f4fd', 'primaryTextColor': '#1a1a1a', 'primaryBorderColor': '#4a90d9', 'lineColor': '#4a90d9', 'secondaryColor': '#f0f7e6', 'tertiaryColor': '#fff5e6'}}}%% +flowchart TB + subgraph FileLayer["File Layer"] + file_collection[("file_collection")] + end + + subgraph FileData["Per-File Data"] + file_data[file_data] + end + + subgraph Extracted["Extracted (ReactiveMerge)"] + decls[decls] + annotations[annotations] + value_refs[value_refs] + type_refs[type_refs] + cross_file_items[cross_file_items] + end + + subgraph TypeDeps["ReactiveTypeDeps"] + decl_by_path[decl_by_path] + all_type_refs[all_type_refs] + end + + subgraph ExcRefs["ReactiveExceptionRefs"] + exception_refs[exception_refs] + exception_decls[exception_decls] + resolved_refs[resolved_refs] + end + + subgraph DeclRefs["ReactiveDeclRefs"] + combined_refs[combined_refs] + end + + subgraph Liveness["ReactiveLiveness"] + roots[roots] + edges[edges] + live[live
fixpoint] + end + + subgraph Solver["ReactiveSolver"] + dead_decls[dead_decls] + live_decls[live_decls] + dead_modules[dead_modules] + dead_by_file[dead_by_file] + issues_by_file[issues_by_file] + incorrect_dead[incorrect_dead] + module_issues[module_issues] + end + + subgraph Report["Report (iter only)"] + OUTPUT[("REPORT")] + end + + file_collection -->|process| file_data + file_data -->|flatMap| decls + file_data -->|flatMap| annotations + file_data -->|flatMap| value_refs + file_data -->|flatMap| type_refs + file_data -->|flatMap| cross_file_items + + decls -->|flatMap| decl_by_path + decl_by_path -->|union+join| all_type_refs + + cross_file_items -->|flatMap| exception_refs + decls -->|flatMap| exception_decls + exception_refs -->|join| resolved_refs + exception_decls -->|join| resolved_refs + + decls --> combined_refs + value_refs --> combined_refs + type_refs --> combined_refs + all_type_refs --> combined_refs + resolved_refs --> combined_refs + + decls --> roots + annotations --> roots + + combined_refs -->|flatMap| edges + roots --> live + edges --> live + + decls -->|join| dead_decls + live -->|NOT in| dead_decls + decls -->|join| live_decls + live -->|IN| live_decls + + dead_decls --> dead_modules + live_decls --> dead_modules + + dead_decls -->|flatMap| dead_by_file + dead_by_file -->|flatMap| issues_by_file + + live_decls -->|join @dead| incorrect_dead + annotations --> incorrect_dead + + dead_modules -->|join| module_issues + issues_by_file --> module_issues + + issues_by_file -->|iter| OUTPUT + incorrect_dead -->|iter| OUTPUT + module_issues -->|iter| OUTPUT + + classDef fileLayer fill:#e8f4fd,stroke:#4a90d9,stroke-width:2px + classDef extracted fill:#f0f7e6,stroke:#6b8e23,stroke-width:2px + classDef typeDeps fill:#fff5e6,stroke:#d4a574,stroke-width:2px + classDef excDeps fill:#f5e6ff,stroke:#9966cc,stroke-width:2px + classDef declRefs fill:#e6f0ff,stroke:#4a74d9,stroke-width:2px + classDef liveness fill:#ffe6e6,stroke:#cc6666,stroke-width:2px + classDef solver fill:#ffe6f0,stroke:#cc6699,stroke-width:2px + classDef output fill:#e6ffe6,stroke:#2e8b2e,stroke-width:2px + + class file_collection,file_data fileLayer + class decls,annotations,value_refs,type_refs,cross_file_items extracted + class decl_by_path,all_type_refs typeDeps + class exception_refs,exception_decls,resolved_refs excDeps + class combined_refs declRefs + class roots,edges,live liveness + class dead_decls,live_decls,dead_modules,dead_by_file,issues_by_file,incorrect_dead,module_issues solver + class OUTPUT output diff --git a/analysis/reanalyze/diagrams/reactive-pipeline.svg b/analysis/reanalyze/diagrams/reactive-pipeline.svg new file mode 100644 index 0000000000..6065d0843d --- /dev/null +++ b/analysis/reanalyze/diagrams/reactive-pipeline.svg @@ -0,0 +1 @@ +

[reactive-pipeline.svg: rendered SVG omitted — text labels duplicate reactive-pipeline.mmd above]
\ No newline at end of file diff --git a/analysis/reanalyze/src/AnnotationStore.ml b/analysis/reanalyze/src/AnnotationStore.ml new file mode 100644 index 0000000000..b34dbce8e7 --- /dev/null +++ b/analysis/reanalyze/src/AnnotationStore.ml @@ -0,0 +1,34 @@ +(** Abstraction over annotation storage. + + Allows the solver to work with either: + - [Frozen]: Traditional [FileAnnotations.t] (copied from reactive) + - [Reactive]: Direct [Reactive.t] (no copy, zero-cost on warm runs) *) + +type t = + | Frozen of FileAnnotations.t + | Reactive of (Lexing.position, FileAnnotations.annotated_as) Reactive.t + +let of_frozen ann = Frozen ann + +let of_reactive reactive = Reactive reactive + +let is_annotated_dead t pos = + match t with + | Frozen ann -> FileAnnotations.is_annotated_dead ann pos + | Reactive reactive -> Reactive.get reactive pos = Some FileAnnotations.Dead + +let is_annotated_gentype_or_live t pos = + match t with + | Frozen ann -> FileAnnotations.is_annotated_gentype_or_live ann pos + | Reactive reactive -> ( + match Reactive.get reactive pos with + | Some (FileAnnotations.Live | FileAnnotations.GenType) -> true + | Some FileAnnotations.Dead | None -> false) + +let is_annotated_gentype_or_dead t pos = + match t with + | Frozen ann -> FileAnnotations.is_annotated_gentype_or_dead ann pos + | Reactive reactive -> ( + match Reactive.get reactive pos with + | Some (FileAnnotations.Dead | FileAnnotations.GenType) -> true + | Some FileAnnotations.Live | None -> false) diff --git a/analysis/reanalyze/src/AnnotationStore.mli b/analysis/reanalyze/src/AnnotationStore.mli new file mode 100644 index 0000000000..0c8e099fd8 --- /dev/null +++ b/analysis/reanalyze/src/AnnotationStore.mli @@ -0,0 +1,19 @@ +(** Abstraction over annotation storage. + + Allows the solver to work with either: + - [Frozen]: Traditional [FileAnnotations.t] (copied from reactive) + - [Reactive]: Direct [Reactive.t] (no copy, zero-cost on warm runs) *) + +type t +(** Abstract annotation store *) + +val of_frozen : FileAnnotations.t -> t +(** Wrap a frozen [FileAnnotations.t] *) + +val of_reactive : + (Lexing.position, FileAnnotations.annotated_as) Reactive.t -> t +(** Wrap a reactive collection directly (no copy) *) + +val is_annotated_dead : t -> Lexing.position -> bool +val is_annotated_gentype_or_live : t -> Lexing.position -> bool +val is_annotated_gentype_or_dead : t -> Lexing.position -> bool diff --git a/analysis/reanalyze/src/Cli.ml b/analysis/reanalyze/src/Cli.ml index 240d369b18..a05ff04e0c 100644 --- a/analysis/reanalyze/src/Cli.ml +++ b/analysis/reanalyze/src/Cli.ml @@ -27,3 +27,15 @@ let parallel = ref 0 (* timing: report internal timing of analysis phases *) let timing = ref false + +(* use reactive/incremental analysis (caches processed file_data) *) +let reactive = ref false + +(* number of analysis runs (for benchmarking reactive mode) *) +let runs = ref 1 + +(* number of files to churn (remove/re-add) between runs for incremental testing *) +let churn = ref 0 + +(* output mermaid diagram of reactive pipeline *) +let mermaid = ref false diff --git a/analysis/reanalyze/src/CrossFileItems.ml b/analysis/reanalyze/src/CrossFileItems.ml index cf038fdb8f..f51e55a468 100644 --- a/analysis/reanalyze/src/CrossFileItems.ml +++ b/analysis/reanalyze/src/CrossFileItems.ml @@ -58,6 +58,15 @@ let merge_all (builders : builder list) : t = let function_refs = builders |> List.concat_map (fun b -> b.function_refs) in {exception_refs; optional_arg_calls; function_refs} +(** {2 Builder extraction for reactive merge} *) + +let 
builder_to_t (builder : builder) : t = + { + exception_refs = builder.exception_refs; + optional_arg_calls = builder.optional_arg_calls; + function_refs = builder.function_refs; + } + (** {2 Processing API} *) let process_exception_refs (t : t) ~refs ~file_deps ~find_exception ~config = @@ -69,42 +78,3 @@ let process_exception_refs (t : t) ~refs ~file_deps ~find_exception ~config = DeadCommon.addValueReference ~config ~refs ~file_deps ~binding:Location.none ~addFileReference:true ~locFrom:loc_from ~locTo:loc_to) - -(** Compute optional args state from calls and function references. - Returns a map from position to final OptionalArgs.t state. - Pure function - does not mutate declarations. *) -let compute_optional_args_state (t : t) ~decls ~is_live : OptionalArgsState.t = - let state = OptionalArgsState.create () in - (* Initialize state from declarations *) - let get_state pos = - match OptionalArgsState.find_opt state pos with - | Some s -> s - | None -> ( - match Declarations.find_opt decls pos with - | Some {declKind = Value {optionalArgs}} -> optionalArgs - | _ -> OptionalArgs.empty) - in - let set_state pos s = OptionalArgsState.set state pos s in - (* Process optional arg calls *) - t.optional_arg_calls - |> List.iter (fun {pos_from; pos_to; arg_names; arg_names_maybe} -> - if is_live pos_from then - let current = get_state pos_to in - let updated = - OptionalArgs.apply_call ~argNames:arg_names - ~argNamesMaybe:arg_names_maybe current - in - set_state pos_to updated); - (* Process function references *) - t.function_refs - |> List.iter (fun {pos_from; pos_to} -> - if is_live pos_from then - let state_from = get_state pos_from in - let state_to = get_state pos_to in - if not (OptionalArgs.isEmpty state_to) then ( - let updated_from, updated_to = - OptionalArgs.combine_pair state_from state_to - in - set_state pos_from updated_from; - set_state pos_to updated_to)); - state diff --git a/analysis/reanalyze/src/CrossFileItems.mli b/analysis/reanalyze/src/CrossFileItems.mli index 199089baaf..93141b1004 100644 --- a/analysis/reanalyze/src/CrossFileItems.mli +++ b/analysis/reanalyze/src/CrossFileItems.mli @@ -5,9 +5,26 @@ - [builder] - mutable, for AST processing - [t] - immutable, for processing after merge *) +(** {2 Item types} *) + +type exception_ref = {exception_path: DcePath.t; loc_from: Location.t} + +type optional_arg_call = { + pos_from: Lexing.position; + pos_to: Lexing.position; + arg_names: string list; + arg_names_maybe: string list; +} + +type function_ref = {pos_from: Lexing.position; pos_to: Lexing.position} + (** {2 Types} *) -type t +type t = { + exception_refs: exception_ref list; + optional_arg_calls: optional_arg_call list; + function_refs: function_ref list; +} (** Immutable cross-file items - for processing after merge *) type builder @@ -39,6 +56,11 @@ val add_function_reference : val merge_all : builder list -> t (** Merge all builders into one immutable result. Order doesn't matter. *) +(** {2 Builder extraction for reactive merge} *) + +val builder_to_t : builder -> t +(** Convert builder to t for reactive merge *) + (** {2 Processing API - for after merge} *) val process_exception_refs : @@ -52,11 +74,6 @@ val process_exception_refs : (** {2 Optional Args State} *) -val compute_optional_args_state : - t -> - decls:Declarations.t -> - is_live:(Lexing.position -> bool) -> - OptionalArgsState.t (** Compute final optional args state from calls and function references, taking into account caller liveness via the [is_live] predicate. 
Pure function - does not mutate declarations. *) diff --git a/analysis/reanalyze/src/CrossFileItemsStore.ml b/analysis/reanalyze/src/CrossFileItemsStore.ml new file mode 100644 index 0000000000..33e5a756d6 --- /dev/null +++ b/analysis/reanalyze/src/CrossFileItemsStore.ml @@ -0,0 +1,68 @@ +(** Abstraction over cross-file items storage. + + Allows iteration over optional arg calls and function refs from either: + - [Frozen]: Collected [CrossFileItems.t] + - [Reactive]: Direct iteration over reactive collection (no intermediate allocation) *) + +type t = + | Frozen of CrossFileItems.t + | Reactive of (string, CrossFileItems.t) Reactive.t + +let of_frozen cfi = Frozen cfi + +let of_reactive reactive = Reactive reactive + +let iter_optional_arg_calls t f = + match t with + | Frozen cfi -> List.iter f cfi.CrossFileItems.optional_arg_calls + | Reactive r -> + Reactive.iter + (fun _path items -> List.iter f items.CrossFileItems.optional_arg_calls) + r + +let iter_function_refs t f = + match t with + | Frozen cfi -> List.iter f cfi.CrossFileItems.function_refs + | Reactive r -> + Reactive.iter + (fun _path items -> List.iter f items.CrossFileItems.function_refs) + r + +(** Compute optional args state from calls and function references. + Returns a map from position to final OptionalArgs.t state. + Pure function - does not mutate declarations. *) +let compute_optional_args_state (store : t) ~find_decl ~is_live : + OptionalArgsState.t = + let state = OptionalArgsState.create () in + (* Initialize state from declarations *) + let get_state pos = + match OptionalArgsState.find_opt state pos with + | Some s -> s + | None -> ( + match find_decl pos with + | Some {Decl.declKind = Value {optionalArgs}} -> optionalArgs + | _ -> OptionalArgs.empty) + in + let set_state pos s = OptionalArgsState.set state pos s in + (* Process optional arg calls *) + iter_optional_arg_calls store + (fun {CrossFileItems.pos_from; pos_to; arg_names; arg_names_maybe} -> + if is_live pos_from then + let current = get_state pos_to in + let updated = + OptionalArgs.apply_call ~argNames:arg_names + ~argNamesMaybe:arg_names_maybe current + in + set_state pos_to updated); + (* Process function references *) + iter_function_refs store (fun {CrossFileItems.pos_from; pos_to} -> + if is_live pos_from then + let state_from = get_state pos_from in + let state_to = get_state pos_to in + if not (OptionalArgs.isEmpty state_to) then ( + let updated_from, updated_to = + OptionalArgs.combine_pair state_from state_to + in + set_state pos_from updated_from; + set_state pos_to updated_to)); + state diff --git a/analysis/reanalyze/src/CrossFileItemsStore.mli b/analysis/reanalyze/src/CrossFileItemsStore.mli new file mode 100644 index 0000000000..98eda6d3d7 --- /dev/null +++ b/analysis/reanalyze/src/CrossFileItemsStore.mli @@ -0,0 +1,30 @@ +(** Abstraction over cross-file items storage. 
+ + Allows iteration over optional arg calls and function refs from either: + - [Frozen]: Collected [CrossFileItems.t] + - [Reactive]: Direct iteration over reactive collection (no intermediate allocation) *) + +type t = + | Frozen of CrossFileItems.t + | Reactive of (string, CrossFileItems.t) Reactive.t + (** Cross-file items store with exposed constructors for pattern matching *) + +val of_frozen : CrossFileItems.t -> t +(** Wrap a frozen [CrossFileItems.t] *) + +val of_reactive : (string, CrossFileItems.t) Reactive.t -> t +(** Wrap reactive collection directly (no intermediate collection) *) + +val iter_optional_arg_calls : + t -> (CrossFileItems.optional_arg_call -> unit) -> unit +(** Iterate over all optional arg calls *) + +val iter_function_refs : t -> (CrossFileItems.function_ref -> unit) -> unit +(** Iterate over all function refs *) + +val compute_optional_args_state : + t -> + find_decl:(Lexing.position -> Decl.t option) -> + is_live:(Lexing.position -> bool) -> + OptionalArgsState.t +(** Compute optional args state from calls and function references *) diff --git a/analysis/reanalyze/src/DeadCommon.ml b/analysis/reanalyze/src/DeadCommon.ml index 9f3ad1f21a..6260f98469 100644 --- a/analysis/reanalyze/src/DeadCommon.ml +++ b/analysis/reanalyze/src/DeadCommon.ml @@ -14,7 +14,6 @@ module Config = struct let analyzeExternals = ref false let reportUnderscore = false let reportTypesDeadOnlyInInterface = false - let recursiveDebug = false let warnOnCircularDependencies = false end @@ -29,13 +28,6 @@ let fileIsImplementationOf s1 s2 = let liveAnnotation = "live" -(* Helper functions for PosHash with PosSet values *) -let posHashFindSet h k = try PosHash.find h k with Not_found -> PosSet.empty - -let posHashAddSet h k v = - let set = posHashFindSet h k in - PosHash.replace h k (PosSet.add v set) - type decls = Decl.t PosHash.t (** type alias for declaration hashtables *) @@ -88,11 +80,6 @@ let addValueReference ~config ~refs ~file_deps ~(binding : Location.t) FileDeps.add_dep file_deps ~from_file:effectiveFrom.loc_start.pos_fname ~to_file:locTo.loc_start.pos_fname) -(* NOTE: iterFilesFromRootsToLeaves moved to FileDeps.iter_files_from_roots_to_leaves *) - -let iterFilesFromRootsToLeaves ~file_deps iterFun = - FileDeps.iter_files_from_roots_to_leaves file_deps iterFun - let addDeclaration_ ~config ~decls ~(file : FileContext.t) ?posEnd ?posStart ~declKind ~path ~(loc : Location.t) ?(posAdjustment = Decl.Nothing) ~moduleLoc (name : Name.t) = @@ -160,12 +147,42 @@ let isInsideReportedValue (ctx : ReportingContext.t) decl = ReportingContext.set_max_end ctx decl.posEnd; insideReportedValue +(** Check if a reference position is "below" the declaration. + A ref is below if it's in a different file, or comes after the declaration + (but not inside it, e.g. not a callback). *) +let refIsBelow (decl : Decl.t) (posFrom : Lexing.position) = + decl.pos.pos_fname <> posFrom.pos_fname + || decl.pos.pos_cnum < posFrom.pos_cnum + && + (* not a function defined inside a function, e.g. not a callback *) + decl.posEnd.pos_cnum < posFrom.pos_cnum + +(** Create hasRefBelow function using on-demand per-decl search. + [iter_value_refs_from] iterates over (posFrom, posToSet) pairs. + O(total_refs) per dead decl, but dead decls should be few. 
*) +let make_hasRefBelow ~transitive ~iter_value_refs_from = + if transitive then fun _ -> false + else fun decl -> + let found = ref false in + iter_value_refs_from (fun posFrom posToSet -> + if (not !found) && PosSet.mem decl.Decl.pos posToSet then + if refIsBelow decl posFrom then found := true); + !found + (** Report a dead declaration. Returns list of issues (dead module first, then dead value). - Caller is responsible for logging. *) -let reportDeclaration ~config ~refs (ctx : ReportingContext.t) decl : - Issue.t list = + [hasRefBelow] checks if there are references from "below" the declaration. + Only used when [config.run.transitive] is false. + [?checkModuleDead] optional callback for checking dead modules. Defaults to DeadModules.checkModuleDead. + [?shouldReport] optional callback to check if a decl should be reported. Defaults to checking decl.report. *) +let reportDeclaration ~config ~hasRefBelow ?checkModuleDead ?shouldReport + (ctx : ReportingContext.t) decl : Issue.t list = let insideReportedValue = decl |> isInsideReportedValue ctx in - if not decl.report then [] + let should_report = + match shouldReport with + | Some f -> f decl + | None -> decl.report + in + if not should_report then [] else let deadWarning, message = match decl.declKind with @@ -196,29 +213,24 @@ let reportDeclaration ~config ~refs (ctx : ReportingContext.t) decl : | VariantCase -> (WarningDeadType, "is a variant case which is never constructed") in - let hasRefBelow () = - let decl_refs = References.find_value_refs refs decl.pos in - let refIsBelow (pos : Lexing.position) = - decl.pos.pos_fname <> pos.pos_fname - || decl.pos.pos_cnum < pos.pos_cnum - && - (* not a function defined inside a function, e.g. not a callback *) - decl.posEnd.pos_cnum < pos.pos_cnum - in - decl_refs |> PosSet.exists refIsBelow - in let shouldEmitWarning = (not insideReportedValue) && (match decl.path with | name :: _ when name |> Name.isUnderscore -> Config.reportUnderscore | _ -> true) - && (config.DceConfig.run.transitive || not (hasRefBelow ())) + && (config.DceConfig.run.transitive || not (hasRefBelow decl)) in if shouldEmitWarning then - let dead_module_issue = + let moduleName = decl.path |> DcePath.toModuleName ~isType:(decl.declKind |> Decl.Kind.isType) - |> DeadModules.checkModuleDead ~config ~fileName:decl.pos.pos_fname + in + let dead_module_issue = + match checkModuleDead with + | Some f -> f ~fileName:decl.pos.pos_fname moduleName + | None -> + DeadModules.checkModuleDead ~config ~fileName:decl.pos.pos_fname + moduleName in let dead_value_issue = makeDeadIssue ~decl ~message deadWarning in (* Return in order: dead module first (if any), then dead value *) @@ -227,185 +239,258 @@ let reportDeclaration ~config ~refs (ctx : ReportingContext.t) decl : | None -> [dead_value_issue] else [] -let declIsDead ~annotations ~refs decl = - let liveRefs = - refs - |> PosSet.filter (fun p -> - not (FileAnnotations.is_annotated_dead annotations p)) - in - liveRefs |> PosSet.cardinal = 0 - && not - (FileAnnotations.is_annotated_gentype_or_live annotations decl.Decl.pos) - -let doReportDead ~annotations pos = - not (FileAnnotations.is_annotated_gentype_or_dead annotations pos) +let doReportDead ~ann_store pos = + not (AnnotationStore.is_annotated_gentype_or_dead ann_store pos) -let rec resolveRecursiveRefs ~all_refs ~annotations ~config ~decls - ~checkOptionalArg: - (checkOptionalArgFn : config:DceConfig.t -> Decl.t -> Issue.t list) - ~deadDeclarations ~issues ~level ~orderedFiles ~refs ~refsBeingResolved decl - : bool = - match 
decl.Decl.pos with - | _ when decl.resolvedDead <> None -> - if Config.recursiveDebug then - Log_.item "recursiveDebug %s [%d] already resolved@." - (decl.path |> DcePath.toString) - level; - (* Use the already-resolved value, not source annotations *) - Option.get decl.resolvedDead - | _ when PosSet.mem decl.pos !refsBeingResolved -> - if Config.recursiveDebug then - Log_.item "recursiveDebug %s [%d] is being resolved: assume dead@." - (decl.path |> DcePath.toString) - level; - true - | _ -> - if Config.recursiveDebug then - Log_.item "recursiveDebug resolving %s [%d]@." - (decl.path |> DcePath.toString) - level; - refsBeingResolved := PosSet.add decl.pos !refsBeingResolved; - let allDepsResolved = ref true in - let newRefs = - refs - |> PosSet.filter (fun pos -> - if pos = decl.pos then ( - if Config.recursiveDebug then - Log_.item "recursiveDebug %s ignoring reference to self@." - (decl.path |> DcePath.toString); - false) - else - match Declarations.find_opt decls pos with - | None -> - if Config.recursiveDebug then - Log_.item "recursiveDebug can't find decl for %s@." - (pos |> Pos.toString); - true - | Some xDecl -> - let xRefs = - match xDecl.declKind |> Decl.Kind.isType with - | true -> References.find_type_refs all_refs pos - | false -> References.find_value_refs all_refs pos - in - let xDeclIsDead = - xDecl - |> resolveRecursiveRefs ~all_refs ~annotations ~config ~decls - ~checkOptionalArg:checkOptionalArgFn ~deadDeclarations - ~issues ~level:(level + 1) ~orderedFiles ~refs:xRefs - ~refsBeingResolved - in - if xDecl.resolvedDead = None then allDepsResolved := false; - not xDeclIsDead) - in - let isDead = decl |> declIsDead ~annotations ~refs:newRefs in - let isResolved = (not isDead) || !allDepsResolved || level = 0 in - if isResolved then ( - decl.resolvedDead <- Some isDead; - if isDead then ( - decl.path - |> DeadModules.markDead ~config - ~isType:(decl.declKind |> Decl.Kind.isType) - ~loc:decl.moduleLoc; - if not (doReportDead ~annotations decl.pos) then decl.report <- false; - deadDeclarations := decl :: !deadDeclarations) - else ( - (* Collect optional args issues *) - checkOptionalArgFn ~config decl - |> List.iter (fun issue -> issues := issue :: !issues); - decl.path - |> DeadModules.markLive ~config - ~isType:(decl.declKind |> Decl.Kind.isType) - ~loc:decl.moduleLoc; - if FileAnnotations.is_annotated_dead annotations decl.pos then ( - (* Collect incorrect @dead annotation issue *) - let issue = - makeDeadIssue ~decl ~message:" is annotated @dead but is live" - IncorrectDeadAnnotation - in - decl.path - |> DcePath.toModuleName ~isType:(decl.declKind |> Decl.Kind.isType) - |> DeadModules.checkModuleDead ~config ~fileName:decl.pos.pos_fname - |> Option.iter (fun mod_issue -> issues := mod_issue :: !issues); - issues := issue :: !issues)); - if config.DceConfig.cli.debug then - let refsString = - newRefs |> PosSet.elements |> List.map Pos.toString - |> String.concat ", " - in - Log_.item "%s %s %s: %d references (%s) [%d]@." - (match isDead with - | true -> "Dead" - | false -> "Live") - (decl.declKind |> Decl.Kind.toString) - (decl.path |> DcePath.toString) - (newRefs |> PosSet.cardinal) - refsString level); - isDead - -let solveDead ~annotations ~config ~decls ~refs ~file_deps ~optional_args_state +(** Forward-based solver using refs_from direction. + Computes liveness via forward propagation, then processes declarations. 
*) +let solveDeadForward ~ann_store ~config ~decl_store ~refs ~optional_args_state ~checkOptionalArg: (checkOptionalArgFn : optional_args_state:OptionalArgsState.t -> - annotations:FileAnnotations.t -> + ann_store:AnnotationStore.t -> config:DceConfig.t -> Decl.t -> Issue.t list) : AnalysisResult.t = - let iterDeclInOrder ~deadDeclarations ~issues ~orderedFiles decl = - let decl_refs = - match decl |> Decl.isValue with - | true -> References.find_value_refs refs decl.pos - | false -> References.find_type_refs refs decl.pos - in - resolveRecursiveRefs ~all_refs:refs ~annotations ~config ~decls - ~checkOptionalArg:(checkOptionalArgFn ~optional_args_state ~annotations) - ~deadDeclarations ~issues ~level:0 ~orderedFiles - ~refsBeingResolved:(ref PosSet.empty) ~refs:decl_refs decl - |> ignore + (* Compute liveness using forward propagation *) + let debug = config.DceConfig.cli.debug in + let transitive = config.DceConfig.run.transitive in + let live = Liveness.compute_forward ~debug ~decl_store ~refs ~ann_store in + + (* hasRefBelow uses on-demand search through refs_from *) + let hasRefBelow = + make_hasRefBelow ~transitive + ~iter_value_refs_from:(References.iter_value_refs_from refs) in - if config.DceConfig.cli.debug then ( - Log_.item "@.File References@.@."; - let fileList = ref [] in - FileDeps.iter_deps file_deps (fun file files -> - fileList := (file, files) :: !fileList); - !fileList - |> List.sort (fun (f1, _) (f2, _) -> String.compare f1 f2) - |> List.iter (fun (file, files) -> - Log_.item "%s -->> %s@." - (file |> Filename.basename) - (files |> FileSet.elements |> List.map Filename.basename - |> String.concat ", "))); - let declarations = - Declarations.fold - (fun _pos decl declarations -> decl :: declarations) - decls [] + + (* Process each declaration based on computed liveness *) + let deadDeclarations = ref [] in + let inline_issues = ref [] in + + (* For consistent debug output, collect and sort declarations *) + let all_decls = + DeclarationStore.fold (fun _pos decl acc -> decl :: acc) decl_store [] + |> List.fast_sort Decl.compareForReporting in - let orderedFiles = Hashtbl.create 256 in - iterFilesFromRootsToLeaves ~file_deps - (let current = ref 0 in - fun fileName -> - incr current; - Hashtbl.add orderedFiles fileName !current); - let orderedDeclarations = - (* analyze in reverse order *) - declarations |> List.fast_sort (Decl.compareUsingDependencies ~orderedFiles) + + all_decls + |> List.iter (fun (decl : Decl.t) -> + let pos = decl.pos in + let live_reason = Liveness.get_live_reason ~live pos in + let is_live = Option.is_some live_reason in + let is_dead = not is_live in + + (* Debug output (forward model): + show reachability + why (root/propagated), without inverse refs. *) + (if debug then + let status = + match live_reason with + | None -> "Dead" + | Some reason -> + Printf.sprintf "Live (%s)" (Liveness.reason_to_string reason) + in + Log_.item "%s %s %s@." 
status + (decl.declKind |> Decl.Kind.toString) + (decl.path |> DcePath.toString)); + + decl.resolvedDead <- Some is_dead; + + if is_dead then ( + decl.path + |> DeadModules.markDead ~config + ~isType:(decl.declKind |> Decl.Kind.isType) + ~loc:decl.moduleLoc; + if not (doReportDead ~ann_store decl.pos) then decl.report <- false; + deadDeclarations := decl :: !deadDeclarations) + else ( + (* Collect optional args issues for live declarations *) + checkOptionalArgFn ~optional_args_state ~ann_store ~config decl + |> List.iter (fun issue -> inline_issues := issue :: !inline_issues); + decl.path + |> DeadModules.markLive ~config + ~isType:(decl.declKind |> Decl.Kind.isType) + ~loc:decl.moduleLoc; + if AnnotationStore.is_annotated_dead ann_store decl.pos then ( + (* Collect incorrect @dead annotation issue *) + let issue = + makeDeadIssue ~decl ~message:" is annotated @dead but is live" + IncorrectDeadAnnotation + in + decl.path + |> DcePath.toModuleName ~isType:(decl.declKind |> Decl.Kind.isType) + |> DeadModules.checkModuleDead ~config ~fileName:decl.pos.pos_fname + |> Option.iter (fun mod_issue -> + inline_issues := mod_issue :: !inline_issues); + inline_issues := issue :: !inline_issues))); + + let sortedDeadDeclarations = + !deadDeclarations |> List.fast_sort Decl.compareForReporting in + + (* Collect issues from dead declarations *) + let reporting_ctx = ReportingContext.create () in + let dead_issues = + sortedDeadDeclarations + |> List.concat_map (fun decl -> + reportDeclaration ~config ~hasRefBelow reporting_ctx decl) + in + let all_issues = List.rev !inline_issues @ dead_issues in + AnalysisResult.add_issues AnalysisResult.empty all_issues + +(** Reactive solver using reactive liveness collection. + [value_refs_from] is only needed when [transitive=false] for hasRefBelow. + Pass [None] when [transitive=true] to avoid any refs computation. 
*) +let solveDeadReactive ~ann_store ~config ~decl_store ~value_refs_from + ~(live : (Lexing.position, unit) Reactive.t) + ~(roots : (Lexing.position, unit) Reactive.t) ~optional_args_state + ~checkOptionalArg: + (checkOptionalArgFn : + optional_args_state:OptionalArgsState.t -> + ann_store:AnnotationStore.t -> + config:DceConfig.t -> + Decl.t -> + Issue.t list) : AnalysisResult.t = + let t0 = Unix.gettimeofday () in + let debug = config.DceConfig.cli.debug in + let transitive = config.DceConfig.run.transitive in + let is_live pos = Reactive.get live pos <> None in + + (* hasRefBelow uses on-demand search through value_refs_from *) + let hasRefBelow = + match value_refs_from with + | None -> fun _ -> false + | Some refs_from -> + make_hasRefBelow ~transitive ~iter_value_refs_from:(fun f -> + Reactive.iter f refs_from) + in + + (* Process each declaration based on computed liveness *) let deadDeclarations = ref [] in let inline_issues = ref [] in - orderedDeclarations - |> List.iter - (iterDeclInOrder ~orderedFiles ~deadDeclarations ~issues:inline_issues); + + let t1 = Unix.gettimeofday () in + (* For consistent debug output, collect and sort declarations *) + let all_decls = + DeclarationStore.fold (fun _pos decl acc -> decl :: acc) decl_store [] + in + let t2 = Unix.gettimeofday () in + let all_decls = all_decls |> List.fast_sort Decl.compareForReporting in + let t3 = Unix.gettimeofday () in + let num_decls = List.length all_decls in + + (* Count operations in the loop *) + let num_live_checks = ref 0 in + let num_dead = ref 0 in + let num_live = ref 0 in + + all_decls + |> List.iter (fun (decl : Decl.t) -> + let pos = decl.pos in + incr num_live_checks; + let is_live = is_live pos in + let is_dead = not is_live in + + (* Debug output (forward model): derive root/propagated from [roots]. *) + (if debug then + let live_reason : Liveness.live_reason option = + if not is_live then None + else if Reactive.get roots pos <> None then + if AnnotationStore.is_annotated_gentype_or_live ann_store pos + then Some Liveness.Annotated + else Some Liveness.ExternalRef + else Some Liveness.Propagated + in + let status = + match live_reason with + | None -> "Dead" + | Some reason -> + Printf.sprintf "Live (%s)" (Liveness.reason_to_string reason) + in + Log_.item "%s %s %s@." 
status + (decl.declKind |> Decl.Kind.toString) + (decl.path |> DcePath.toString)); + + decl.resolvedDead <- Some is_dead; + + if is_dead then ( + incr num_dead; + decl.path + |> DeadModules.markDead ~config + ~isType:(decl.declKind |> Decl.Kind.isType) + ~loc:decl.moduleLoc; + if not (doReportDead ~ann_store decl.pos) then decl.report <- false; + deadDeclarations := decl :: !deadDeclarations) + else ( + incr num_live; + (* Collect optional args issues for live declarations *) + checkOptionalArgFn ~optional_args_state ~ann_store ~config decl + |> List.iter (fun issue -> inline_issues := issue :: !inline_issues); + decl.path + |> DeadModules.markLive ~config + ~isType:(decl.declKind |> Decl.Kind.isType) + ~loc:decl.moduleLoc; + if AnnotationStore.is_annotated_dead ann_store decl.pos then ( + (* Collect incorrect @dead annotation issue *) + let issue = + makeDeadIssue ~decl ~message:" is annotated @dead but is live" + IncorrectDeadAnnotation + in + decl.path + |> DcePath.toModuleName ~isType:(decl.declKind |> Decl.Kind.isType) + |> DeadModules.checkModuleDead ~config ~fileName:decl.pos.pos_fname + |> Option.iter (fun mod_issue -> + inline_issues := mod_issue :: !inline_issues); + inline_issues := issue :: !inline_issues))); + let t4 = Unix.gettimeofday () in + let sortedDeadDeclarations = !deadDeclarations |> List.fast_sort Decl.compareForReporting in + let t5 = Unix.gettimeofday () in + (* Collect issues from dead declarations *) let reporting_ctx = ReportingContext.create () in let dead_issues = sortedDeadDeclarations |> List.concat_map (fun decl -> - reportDeclaration ~config ~refs reporting_ctx decl) + reportDeclaration ~config ~hasRefBelow reporting_ctx decl) in - (* Combine all issues: inline issues first (they were logged during analysis), - then dead declaration issues *) + let t6 = Unix.gettimeofday () in let all_issues = List.rev !inline_issues @ dead_issues in - (* Return result - caller is responsible for logging *) + let t7 = Unix.gettimeofday () in + + Printf.eprintf + " solveDeadReactive timing breakdown:\n\ + \ setup: %6.2fms\n\ + \ collect: %6.2fms (DeclarationStore.fold)\n\ + \ sort: %6.2fms (List.fast_sort %d decls)\n\ + \ iterate: %6.2fms (check liveness for %d decls: %d dead, %d live)\n\ + \ sort_dead: %6.2fms (sort %d dead decls)\n\ + \ report: %6.2fms (generate issues)\n\ + \ combine: %6.2fms\n\ + \ TOTAL: %6.2fms\n" + ((t1 -. t0) *. 1000.0) + ((t2 -. t1) *. 1000.0) + ((t3 -. t2) *. 1000.0) + num_decls + ((t4 -. t3) *. 1000.0) + !num_live_checks !num_dead !num_live + ((t5 -. t4) *. 1000.0) + !num_dead + ((t6 -. t5) *. 1000.0) + ((t7 -. t6) *. 1000.0) + ((t7 -. t0) *. 1000.0); + AnalysisResult.add_issues AnalysisResult.empty all_issues + +(** Main entry point - uses forward solver. *) +let solveDead ~ann_store ~config ~decl_store ~ref_store ~optional_args_state + ~checkOptionalArg : AnalysisResult.t = + match ReferenceStore.get_refs_opt ref_store with + | Some refs -> + solveDeadForward ~ann_store ~config ~decl_store ~refs ~optional_args_state + ~checkOptionalArg + | None -> + failwith + "solveDead: ReferenceStore must be Frozen (use solveDeadReactive for \ + reactive mode)" diff --git a/analysis/reanalyze/src/DeadOptionalArgs.ml b/analysis/reanalyze/src/DeadOptionalArgs.ml index c7fcc93b8e..71bef0ac99 100644 --- a/analysis/reanalyze/src/DeadOptionalArgs.ml +++ b/analysis/reanalyze/src/DeadOptionalArgs.ml @@ -59,12 +59,12 @@ let addReferences ~config ~cross_file ~(locFrom : Location.t) (** Check for optional args issues. Returns issues instead of logging. 
Uses optional_args_state map for final computed state. *) -let check ~optional_args_state ~annotations ~config:_ decl : Issue.t list = +let check ~optional_args_state ~ann_store ~config:_ decl : Issue.t list = match decl with | {Decl.declKind = Value {optionalArgs}} when active () && not - (FileAnnotations.is_annotated_gentype_or_live annotations decl.pos) + (AnnotationStore.is_annotated_gentype_or_live ann_store decl.pos) -> (* Look up computed state from map, fall back to declaration's initial state *) let state = diff --git a/analysis/reanalyze/src/DeclarationStore.ml b/analysis/reanalyze/src/DeclarationStore.ml new file mode 100644 index 0000000000..7b0043c541 --- /dev/null +++ b/analysis/reanalyze/src/DeclarationStore.ml @@ -0,0 +1,33 @@ +(** Abstraction over declaration storage. + + Allows the solver to work with either: + - [Frozen]: Traditional [Declarations.t] (copied from reactive) + - [Reactive]: Direct [Reactive.t] (no copy, zero-cost on warm runs) + + This eliminates the O(N) freeze step when using reactive mode. *) + +type t = + | Frozen of Declarations.t + | Reactive of (Lexing.position, Decl.t) Reactive.t + +let of_frozen decls = Frozen decls + +let of_reactive reactive = Reactive reactive + +let find_opt t pos = + match t with + | Frozen decls -> Declarations.find_opt decls pos + | Reactive reactive -> Reactive.get reactive pos + +let fold f t init = + match t with + | Frozen decls -> Declarations.fold f decls init + | Reactive reactive -> + let acc = ref init in + Reactive.iter (fun pos decl -> acc := f pos decl !acc) reactive; + !acc + +let iter f t = + match t with + | Frozen decls -> Declarations.iter f decls + | Reactive reactive -> Reactive.iter f reactive diff --git a/analysis/reanalyze/src/DeclarationStore.mli b/analysis/reanalyze/src/DeclarationStore.mli new file mode 100644 index 0000000000..c50583aca1 --- /dev/null +++ b/analysis/reanalyze/src/DeclarationStore.mli @@ -0,0 +1,27 @@ +(** Abstraction over declaration storage. + + Allows the solver to work with either: + - [Frozen]: Traditional [Declarations.t] (copied from reactive) + - [Reactive]: Direct [Reactive.t] (no copy, zero-cost on warm runs) + + This eliminates the O(N) freeze step when using reactive mode. 
*) + +type t = + | Frozen of Declarations.t + | Reactive of (Lexing.position, Decl.t) Reactive.t + (** Declaration store - either frozen or reactive *) + +val of_frozen : Declarations.t -> t +(** Wrap a frozen [Declarations.t] *) + +val of_reactive : (Lexing.position, Decl.t) Reactive.t -> t +(** Wrap a reactive collection directly (no copy) *) + +val find_opt : t -> Lexing.position -> Decl.t option +(** Look up a declaration by position *) + +val fold : (Lexing.position -> Decl.t -> 'a -> 'a) -> t -> 'a -> 'a +(** Fold over all declarations *) + +val iter : (Lexing.position -> Decl.t -> unit) -> t -> unit +(** Iterate over all declarations *) diff --git a/analysis/reanalyze/src/Declarations.ml b/analysis/reanalyze/src/Declarations.ml index cf49afdd5a..6b8dfedc7d 100644 --- a/analysis/reanalyze/src/Declarations.ml +++ b/analysis/reanalyze/src/Declarations.ml @@ -28,6 +28,13 @@ let merge_all (builders : builder list) : t = PosHash.iter (fun pos decl -> PosHash.replace result pos decl) builder); result +(* ===== Builder extraction for reactive merge ===== *) + +let builder_to_list (builder : builder) : (Lexing.position * Decl.t) list = + PosHash.fold (fun pos decl acc -> (pos, decl) :: acc) builder [] + +let create_from_hashtbl (h : Decl.t PosHash.t) : t = h + (* ===== Read-only API ===== *) let find_opt (t : t) pos = PosHash.find_opt t pos @@ -35,3 +42,5 @@ let find_opt (t : t) pos = PosHash.find_opt t pos let fold f (t : t) init = PosHash.fold f t init let iter f (t : t) = PosHash.iter f t + +let length (t : t) = PosHash.length t diff --git a/analysis/reanalyze/src/Declarations.mli b/analysis/reanalyze/src/Declarations.mli index 31bbb7934a..e6362ee2e9 100644 --- a/analysis/reanalyze/src/Declarations.mli +++ b/analysis/reanalyze/src/Declarations.mli @@ -25,8 +25,18 @@ val replace_builder : builder -> Lexing.position -> Decl.t -> unit val merge_all : builder list -> t (** Merge all builders into one immutable result. Order doesn't matter. 
*) +(** {2 Builder extraction for reactive merge} *) + +val builder_to_list : builder -> (Lexing.position * Decl.t) list +(** Extract all declarations as a list for reactive merge *) + +val create_from_hashtbl : Decl.t PosHash.t -> t +(** Create from hashtable for reactive merge *) + (** {2 Read-only API for t - for solver} *) val find_opt : t -> Lexing.position -> Decl.t option val fold : (Lexing.position -> Decl.t -> 'a -> 'a) -> t -> 'a -> 'a val iter : (Lexing.position -> Decl.t -> unit) -> t -> unit + +val length : t -> int diff --git a/analysis/reanalyze/src/FileAnnotations.ml b/analysis/reanalyze/src/FileAnnotations.ml index c8344a201f..60e78a0bb9 100644 --- a/analysis/reanalyze/src/FileAnnotations.ml +++ b/analysis/reanalyze/src/FileAnnotations.ml @@ -32,6 +32,14 @@ let merge_all (builders : builder list) : t = builder); result +(* ===== Builder extraction for reactive merge ===== *) + +let builder_to_list (builder : builder) : (Lexing.position * annotated_as) list + = + PosHash.fold (fun pos value acc -> (pos, value) :: acc) builder [] + +let create_from_hashtbl (h : annotated_as PosHash.t) : t = h + (* ===== Read-only API ===== *) let is_annotated_dead (state : t) pos = PosHash.find_opt state pos = Some Dead @@ -45,3 +53,7 @@ let is_annotated_gentype_or_dead (state : t) pos = match PosHash.find_opt state pos with | Some (Dead | GenType) -> true | Some Live | None -> false + +let length (t : t) = PosHash.length t + +let iter f (t : t) = PosHash.iter f t diff --git a/analysis/reanalyze/src/FileAnnotations.mli b/analysis/reanalyze/src/FileAnnotations.mli index dd3df7d861..292b5b5c12 100644 --- a/analysis/reanalyze/src/FileAnnotations.mli +++ b/analysis/reanalyze/src/FileAnnotations.mli @@ -9,6 +9,8 @@ (** {2 Types} *) +type annotated_as = GenType | Dead | Live (** Annotation type *) + type t (** Immutable annotations - for solver (read-only) *) @@ -25,8 +27,18 @@ val annotate_live : builder -> Lexing.position -> unit val merge_all : builder list -> t (** Merge all builders into one immutable result. Order doesn't matter. 
*) +(** {2 Builder extraction for reactive merge} *) + +val builder_to_list : builder -> (Lexing.position * annotated_as) list +(** Extract all annotations as a list for reactive merge *) + +val create_from_hashtbl : annotated_as PosHash.t -> t +(** Create from hashtable for reactive merge *) + (** {2 Read-only API for t - for solver} *) val is_annotated_dead : t -> Lexing.position -> bool val is_annotated_gentype_or_live : t -> Lexing.position -> bool val is_annotated_gentype_or_dead : t -> Lexing.position -> bool +val length : t -> int +val iter : (Lexing.position -> annotated_as -> unit) -> t -> unit diff --git a/analysis/reanalyze/src/FileDeps.ml b/analysis/reanalyze/src/FileDeps.ml index ed34e7c4c6..ec83cb2896 100644 --- a/analysis/reanalyze/src/FileDeps.ml +++ b/analysis/reanalyze/src/FileDeps.ml @@ -64,6 +64,17 @@ let merge_all (builders : builder list) : t = |> List.iter (fun b -> merge_into_builder ~from:b ~into:merged_builder); freeze_builder merged_builder +(** {2 Builder extraction for reactive merge} *) + +let builder_files (builder : builder) : FileSet.t = builder.files + +let builder_deps_to_list (builder : builder) : (string * FileSet.t) list = + FileHash.fold + (fun from_file to_files acc -> (from_file, to_files) :: acc) + builder.deps [] + +let create ~files ~deps : t = {files; deps} + (** {2 Read-only API} *) let get_files (t : t) = t.files @@ -77,6 +88,10 @@ let iter_deps (t : t) f = FileHash.iter f t.deps let file_exists (t : t) file = FileHash.mem t.deps file +let files_count (t : t) = FileSet.cardinal t.files + +let deps_count (t : t) = FileHash.length t.deps + (** {2 Topological ordering} *) let iter_files_from_roots_to_leaves (t : t) iterFun = diff --git a/analysis/reanalyze/src/FileDeps.mli b/analysis/reanalyze/src/FileDeps.mli index 2975e5ceca..1536d66451 100644 --- a/analysis/reanalyze/src/FileDeps.mli +++ b/analysis/reanalyze/src/FileDeps.mli @@ -35,6 +35,22 @@ val freeze_builder : builder -> t val merge_all : builder list -> t (** Merge all builders into one immutable result. Order doesn't matter. *) +(** {2 Builder extraction for reactive merge} *) + +val builder_files : builder -> FileSet.t +(** Get files set from builder *) + +val builder_deps_to_list : builder -> (string * FileSet.t) list +(** Extract all deps as a list for reactive merge *) + +(** {2 Internal types (for ReactiveMerge)} *) + +module FileHash : Hashtbl.S with type key = string +(** File-keyed hashtable *) + +val create : files:FileSet.t -> deps:FileSet.t FileHash.t -> t +(** Create a FileDeps.t from files set and deps hashtable *) + (** {2 Read-only API for t - for analysis} *) val get_files : t -> FileSet.t @@ -49,6 +65,12 @@ val iter_deps : t -> (string -> FileSet.t -> unit) -> unit val file_exists : t -> string -> bool (** Check if a file exists in the graph. *) +val files_count : t -> int +(** Count of files in the file set. *) + +val deps_count : t -> int +(** Count of dependencies (number of from_file entries). *) + (** {2 Topological ordering} *) val iter_files_from_roots_to_leaves : t -> (string -> unit) -> unit diff --git a/analysis/reanalyze/src/Liveness.ml b/analysis/reanalyze/src/Liveness.ml new file mode 100644 index 0000000000..ea6542f008 --- /dev/null +++ b/analysis/reanalyze/src/Liveness.ml @@ -0,0 +1,238 @@ +(** Forward liveness fixpoint computation. + + Computes the set of live declarations by forward propagation: + 1. Start with roots (inherently live declarations) + 2. For each live declaration, mark what it references as live + 3. 
Repeat until fixpoint + + Roots include: + - Declarations annotated @live or @genType + - Declarations referenced from non-declaration positions (external uses) + + Note: refs_from is keyed by expression positions, not declaration positions. + We need to find all refs where posFrom is within the declaration's range. *) + +(** Reason why a declaration is live *) +type live_reason = + | Annotated (** Has @live or @genType annotation *) + | ExternalRef (** Referenced from outside any declaration *) + | Propagated (** Referenced by another live declaration *) + +let reason_to_string = function + | Annotated -> "annotated" + | ExternalRef -> "external ref" + | Propagated -> "propagated" + +(** Check if a position is within a declaration's range *) +let pos_in_decl (pos : Lexing.position) (decl : Decl.t) : bool = + pos.pos_fname = decl.pos.pos_fname + && pos.pos_cnum >= decl.posStart.pos_cnum + && pos.pos_cnum <= decl.posEnd.pos_cnum + +(** Build a hashtable mapping posTo -> bool indicating if it has external refs. + External refs are refs where posFrom is NOT a declaration position. + (Matching backward algorithm: it checks find_opt, not range containment) *) +let find_externally_referenced ~(decl_store : DeclarationStore.t) + ~(refs : References.t) : bool PosHash.t = + let externally_referenced = PosHash.create 256 in + + (* Helper: check if posFrom is a declaration position *) + let is_decl_pos posFrom = + DeclarationStore.find_opt decl_store posFrom <> None + in + + (* Check value refs *) + References.iter_value_refs_from refs (fun posFrom posToSet -> + if not (is_decl_pos posFrom) then + PosSet.iter + (fun posTo -> PosHash.replace externally_referenced posTo true) + posToSet); + + (* Check type refs *) + References.iter_type_refs_from refs (fun posFrom posToSet -> + if not (is_decl_pos posFrom) then + PosSet.iter + (fun posTo -> PosHash.replace externally_referenced posTo true) + posToSet); + + externally_referenced + +(** Check if a declaration is inherently live (a root) *) +let is_root ~ann_store ~externally_referenced (decl : Decl.t) = + AnnotationStore.is_annotated_gentype_or_live ann_store decl.pos + || PosHash.mem externally_referenced decl.pos + +(** Build index mapping declaration positions to their outgoing refs. + Done once upfront to avoid O(worklist × refs) in the main loop. + + Optimized by grouping declarations by file first, so we only check + declarations in the same file as each ref source. 
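+
+    Illustrative lookup against the returned index (assumes [decl_store],
+    [refs] and [some_pos] are in scope; the names are for illustration only):
+
+    {[
+      let index = build_decl_refs_index ~decl_store ~refs in
+      let value_targets, type_targets =
+        match PosHash.find_opt index some_pos with
+        | Some targets -> targets
+        | None -> (PosSet.empty, PosSet.empty)
+      in
+      ignore (PosSet.union value_targets type_targets)
+    ]}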
*) +let build_decl_refs_index ~(decl_store : DeclarationStore.t) + ~(refs : References.t) : (PosSet.t * PosSet.t) PosHash.t = + let index = PosHash.create 256 in + + (* Group declarations by file for efficient lookup *) + let decls_by_file : (string, (Lexing.position * Decl.t) list) Hashtbl.t = + Hashtbl.create 256 + in + DeclarationStore.iter + (fun pos decl -> + let fname = pos.Lexing.pos_fname in + let existing = + try Hashtbl.find decls_by_file fname with Not_found -> [] + in + Hashtbl.replace decls_by_file fname ((pos, decl) :: existing)) + decl_store; + + (* Helper to add targets to a declaration's index entry *) + let add_targets decl_pos targets ~is_type = + let value_targets, type_targets = + match PosHash.find_opt index decl_pos with + | Some pair -> pair + | None -> (PosSet.empty, PosSet.empty) + in + let new_pair = + if is_type then (value_targets, PosSet.union type_targets targets) + else (PosSet.union value_targets targets, type_targets) + in + PosHash.replace index decl_pos new_pair + in + + (* For each ref, find which declaration (in same file) contains its source *) + let process_ref posFrom posToSet ~is_type = + let fname = posFrom.Lexing.pos_fname in + match Hashtbl.find_opt decls_by_file fname with + | None -> () (* No declarations in this file *) + | Some decls_in_file -> + List.iter + (fun (decl_pos, decl) -> + if pos_in_decl posFrom decl then + add_targets decl_pos posToSet ~is_type) + decls_in_file + in + + References.iter_value_refs_from refs (fun posFrom posToSet -> + process_ref posFrom posToSet ~is_type:false); + References.iter_type_refs_from refs (fun posFrom posToSet -> + process_ref posFrom posToSet ~is_type:true); + + index + +(** Compute liveness using forward propagation from roots. + Returns a hashtable mapping positions to their live reason. *) +let compute_forward ~debug ~(decl_store : DeclarationStore.t) + ~(refs : References.t) ~(ann_store : AnnotationStore.t) : + live_reason PosHash.t = + let t0 = Unix.gettimeofday () in + let live = PosHash.create 256 in + let worklist = Queue.create () in + let root_count = ref 0 in + let propagated_count = ref 0 in + + (* Find declarations with external references *) + let externally_referenced = find_externally_referenced ~decl_store ~refs in + + (* Pre-compute index: decl_pos -> (value_targets, type_targets) *) + let decl_refs_index = build_decl_refs_index ~decl_store ~refs in + + if debug then Log_.item "@.Forward Liveness Analysis@.@."; + + (* Initialize with roots *) + DeclarationStore.iter + (fun pos decl -> + if is_root ~ann_store ~externally_referenced decl then ( + incr root_count; + let reason = + if AnnotationStore.is_annotated_gentype_or_live ann_store pos then + Annotated + else ExternalRef + in + PosHash.replace live pos reason; + Queue.push (pos, decl) worklist; + if debug then + Log_.item " Root (%s): %s %s@." (reason_to_string reason) + (decl.declKind |> Decl.Kind.toString) + (decl.path |> DcePath.toString))) + decl_store; + + if debug then Log_.item "@. %d roots found@.@." !root_count; + + (* Forward propagation fixpoint. + For each live declaration, look up its outgoing refs from the index. 
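+
+     Invariant: a position is enqueued at most once, because it is added to
+     [live] at the moment it is pushed and membership in [live] guards every
+     subsequent push; hence each index entry is processed at most once.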
*) + while not (Queue.is_empty worklist) do + let pos, decl = Queue.pop worklist in + + (* Skip if this position is annotated @dead - don't propagate from it *) + if not (AnnotationStore.is_annotated_dead ann_store pos) then + (* Look up pre-computed targets for this declaration *) + match PosHash.find_opt decl_refs_index pos with + | None -> () (* No outgoing refs from this declaration *) + | Some (value_targets, type_targets) -> + (* Propagate to value targets that are value declarations *) + PosSet.iter + (fun target -> + if not (PosHash.mem live target) then + match DeclarationStore.find_opt decl_store target with + | Some target_decl + when not (target_decl.declKind |> Decl.Kind.isType) -> + incr propagated_count; + PosHash.replace live target Propagated; + Queue.push (target, target_decl) worklist; + if debug then + Log_.item " Propagate: %s -> %s@." + (decl.path |> DcePath.toString) + (target_decl.path |> DcePath.toString) + | Some _ -> + (* Type target from value ref - see below *) + () + | None -> + (* External or non-declaration target *) + PosHash.replace live target Propagated) + value_targets; + + (* Propagate to type targets that are type declarations *) + PosSet.iter + (fun target -> + if not (PosHash.mem live target) then + match DeclarationStore.find_opt decl_store target with + | Some target_decl when target_decl.declKind |> Decl.Kind.isType + -> + incr propagated_count; + PosHash.replace live target Propagated; + Queue.push (target, target_decl) worklist; + if debug then + Log_.item " Propagate: %s -> %s@." + (decl.path |> DcePath.toString) + (target_decl.path |> DcePath.toString) + | Some _ -> + (* Value target from type ref - skip *) + () + | None -> + (* External or non-declaration target *) + PosHash.replace live target Propagated) + type_targets + done; + + if debug then + Log_.item "@. %d declarations marked live via propagation@.@." + !propagated_count; + + let t1 = Unix.gettimeofday () in + if !Cli.timing then + Printf.eprintf + " Liveness.compute_forward: %.3fms (roots=%d, propagated=%d, live=%d)\n\ + %!" + ((t1 -. t0) *. 1000.0) + !root_count !propagated_count (PosHash.length live); + + live + +(** Check if a position is live according to forward-computed liveness *) +let is_live_forward ~(live : live_reason PosHash.t) (pos : Lexing.position) : + bool = + PosHash.mem live pos + +(** Get the reason why a position is live, if it is *) +let get_live_reason ~(live : live_reason PosHash.t) (pos : Lexing.position) : + live_reason option = + PosHash.find_opt live pos diff --git a/analysis/reanalyze/src/Liveness.mli b/analysis/reanalyze/src/Liveness.mli new file mode 100644 index 0000000000..5d34cb9082 --- /dev/null +++ b/analysis/reanalyze/src/Liveness.mli @@ -0,0 +1,36 @@ +(** Forward liveness fixpoint computation. + + Computes the set of live declarations by forward propagation: + 1. Start with roots (inherently live declarations) + 2. For each live declaration, mark what it references as live + 3. 
Repeat until fixpoint + + Roots include: + - Declarations annotated @live or @genType + - Declarations referenced from non-declaration positions (external uses) *) + +(** Reason why a declaration is live *) +type live_reason = + | Annotated (** Has @live or @genType annotation *) + | ExternalRef (** Referenced from outside any declaration *) + | Propagated (** Referenced by another live declaration *) + +val reason_to_string : live_reason -> string +(** Convert a live reason to a human-readable string *) + +val compute_forward : + debug:bool -> + decl_store:DeclarationStore.t -> + refs:References.t -> + ann_store:AnnotationStore.t -> + live_reason PosHash.t +(** Compute liveness using forward propagation. + Returns a hashtable mapping live positions to their [live_reason]. + Pass [~debug:true] for verbose output. *) + +val is_live_forward : live:live_reason PosHash.t -> Lexing.position -> bool +(** Check if a position is live according to forward-computed liveness *) + +val get_live_reason : + live:live_reason PosHash.t -> Lexing.position -> live_reason option +(** Get the reason why a position is live, if it is *) diff --git a/analysis/reanalyze/src/Log_.ml b/analysis/reanalyze/src/Log_.ml index a50a73cd68..5a03ae5551 100644 --- a/analysis/reanalyze/src/Log_.ml +++ b/analysis/reanalyze/src/Log_.ml @@ -197,6 +197,7 @@ module Stats = struct let issues = ref [] let addIssue (issue : Issue.t) = issues := issue :: !issues let clear () = issues := [] + let get_issue_count () = List.length !issues let getSortedIssues () = let counters2 = Hashtbl.create 1 in diff --git a/analysis/reanalyze/src/ReactiveAnalysis.ml b/analysis/reanalyze/src/ReactiveAnalysis.ml new file mode 100644 index 0000000000..f29b6d04df --- /dev/null +++ b/analysis/reanalyze/src/ReactiveAnalysis.ml @@ -0,0 +1,155 @@ +(** Reactive analysis service using ReactiveFileCollection. + + This module provides incremental analysis that only re-processes + files that have changed, using ReactiveFileCollection for efficient + delta-based updates. *) + +type cmt_file_result = { + dce_data: DceFileProcessing.file_data option; + exception_data: Exception.file_result option; +} +(** Result of processing a single CMT file *) + +type all_files_result = { + dce_data_list: DceFileProcessing.file_data list; + exception_results: Exception.file_result list; +} +(** Result of processing all CMT files *) + +type t = (Cmt_format.cmt_infos, cmt_file_result option) ReactiveFileCollection.t +(** The reactive collection type *) + +(** Process cmt_infos into a file result *) +let process_cmt_infos ~config ~cmtFilePath cmt_infos : cmt_file_result option = + let excludePath sourceFile = + config.DceConfig.cli.exclude_paths + |> List.exists (fun prefix_ -> + let prefix = + match Filename.is_relative sourceFile with + | true -> prefix_ + | false -> Filename.concat (Sys.getcwd ()) prefix_ + in + String.length prefix <= String.length sourceFile + && + try String.sub sourceFile 0 (String.length prefix) = prefix + with Invalid_argument _ -> false) + in + match cmt_infos.Cmt_format.cmt_annots |> FindSourceFile.cmt with + | Some sourceFile when not (excludePath sourceFile) -> + let is_interface = + match cmt_infos.cmt_annots with + | Interface _ -> true + | _ -> Filename.check_suffix sourceFile "i" + in + let module_name = sourceFile |> Paths.getModuleName in + let dce_file_context : DceFileProcessing.file_context = + {source_path = sourceFile; module_name; is_interface} + in + let file_context = + DeadCommon.FileContext. 
+ {source_path = sourceFile; module_name; is_interface} + in + let dce_data = + if config.DceConfig.run.dce then + Some + (cmt_infos + |> DceFileProcessing.process_cmt_file ~config ~file:dce_file_context + ~cmtFilePath) + else None + in + let exception_data = + if config.DceConfig.run.exception_ then + cmt_infos |> Exception.processCmt ~file:file_context + else None + in + if config.DceConfig.run.termination then + cmt_infos |> Arnold.processCmt ~config ~file:file_context; + Some {dce_data; exception_data} + | _ -> None + +(** Create a new reactive collection *) +let create ~config : t = + ReactiveFileCollection.create ~read_file:Cmt_format.read_cmt + ~process:(fun path cmt_infos -> + process_cmt_infos ~config ~cmtFilePath:path cmt_infos) + +(** Process all files incrementally using ReactiveFileCollection. + First run processes all files. Subsequent runs only process changed files. + Uses batch processing to emit all changes as a single Batch delta. *) +let process_files ~(collection : t) ~config:_ cmtFilePaths : all_files_result = + Timing.time_phase `FileLoading (fun () -> + let total_files = List.length cmtFilePaths in + let cached_before = + cmtFilePaths + |> List.filter (fun p -> ReactiveFileCollection.mem collection p) + |> List.length + in + + (* Process all files as a batch - emits single Batch delta *) + let processed = + ReactiveFileCollection.process_files_batch collection cmtFilePaths + in + let from_cache = total_files - processed in + + if !Cli.timing then + Printf.eprintf + "Reactive: %d files processed, %d from cache (was cached: %d)\n%!" + processed from_cache cached_before; + + (* Collect results from the collection *) + let dce_data_list = ref [] in + let exception_results = ref [] in + + ReactiveFileCollection.iter + (fun _path result_opt -> + match result_opt with + | Some {dce_data; exception_data} -> ( + (match dce_data with + | Some data -> dce_data_list := data :: !dce_data_list + | None -> ()); + match exception_data with + | Some data -> exception_results := data :: !exception_results + | None -> ()) + | None -> ()) + collection; + + { + dce_data_list = List.rev !dce_data_list; + exception_results = List.rev !exception_results; + }) + +(** Get collection length *) +let length (collection : t) = ReactiveFileCollection.length collection + +(** Get the underlying reactive collection for composition. + Returns (path, file_data option) suitable for ReactiveMerge. 
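+
+    Sketch of the intended wiring (the binding [collection] stands for the
+    result of [create ~config]):
+
+    {[
+      let file_data = to_file_data_collection collection in
+      let merged = ReactiveMerge.create file_data in
+      Reactive.iter (fun _pos _decl -> ()) merged.ReactiveMerge.decls
+    ]}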
*) +let to_file_data_collection (collection : t) : + (string, DceFileProcessing.file_data option) Reactive.t = + Reactive.flatMap ~name:"file_data_collection" + (ReactiveFileCollection.to_collection collection) + ~f:(fun path result_opt -> + match result_opt with + | Some {dce_data = Some data; _} -> [(path, Some data)] + | _ -> [(path, None)]) + () + +(** Iterate over all file_data in the collection *) +let iter_file_data (collection : t) (f : DceFileProcessing.file_data -> unit) : + unit = + ReactiveFileCollection.iter + (fun _path result_opt -> + match result_opt with + | Some {dce_data = Some data; _} -> f data + | _ -> ()) + collection + +(** Collect all exception results from the collection *) +let collect_exception_results (collection : t) : Exception.file_result list = + let results = ref [] in + ReactiveFileCollection.iter + (fun _path result_opt -> + match result_opt with + | Some {exception_data = Some data; _} -> results := data :: !results + | _ -> ()) + collection; + !results diff --git a/analysis/reanalyze/src/ReactiveDeclRefs.ml b/analysis/reanalyze/src/ReactiveDeclRefs.ml new file mode 100644 index 0000000000..9f5a2ea26c --- /dev/null +++ b/analysis/reanalyze/src/ReactiveDeclRefs.ml @@ -0,0 +1,83 @@ +(** Reactive mapping from declarations to their outgoing references. + + This is the reactive version of [Liveness.build_decl_refs_index]. + + For each declaration, computes the set of positions it references. + Updates incrementally when refs or declarations change. *) + +(** Build reactive index: decl_pos -> (value_targets, type_targets) + + Uses pure reactive combinators - no internal hashtables. *) +let create ~(decls : (Lexing.position, Decl.t) Reactive.t) + ~(value_refs_from : (Lexing.position, PosSet.t) Reactive.t) + ~(type_refs_from : (Lexing.position, PosSet.t) Reactive.t) : + (Lexing.position, PosSet.t * PosSet.t) Reactive.t = + (* Group declarations by file *) + let decls_by_file : (string, (Lexing.position * Decl.t) list) Reactive.t = + Reactive.flatMap ~name:"decl_refs.decls_by_file" decls + ~f:(fun pos decl -> [(pos.Lexing.pos_fname, [(pos, decl)])]) + ~merge:( @ ) () + in + + (* Check if posFrom is contained in decl's range *) + let pos_in_decl (posFrom : Lexing.position) (decl : Decl.t) : bool = + posFrom.pos_fname = decl.pos.pos_fname + && posFrom.pos_cnum >= decl.posStart.pos_cnum + && posFrom.pos_cnum <= decl.posEnd.pos_cnum + in + + (* For each ref, find which decl(s) contain it and output (decl_pos, targets) *) + let value_decl_refs : (Lexing.position, PosSet.t) Reactive.t = + Reactive.join ~name:"decl_refs.value_decl_refs" value_refs_from + decls_by_file + ~key_of:(fun posFrom _targets -> posFrom.Lexing.pos_fname) + ~f:(fun posFrom targets decls_opt -> + match decls_opt with + | None -> [] + | Some decls_in_file -> + decls_in_file + |> List.filter_map (fun (decl_pos, decl) -> + if pos_in_decl posFrom decl then Some (decl_pos, targets) + else None)) + ~merge:PosSet.union () + in + + let type_decl_refs : (Lexing.position, PosSet.t) Reactive.t = + Reactive.join ~name:"decl_refs.type_decl_refs" type_refs_from decls_by_file + ~key_of:(fun posFrom _targets -> posFrom.Lexing.pos_fname) + ~f:(fun posFrom targets decls_opt -> + match decls_opt with + | None -> [] + | Some decls_in_file -> + decls_in_file + |> List.filter_map (fun (decl_pos, decl) -> + if pos_in_decl posFrom decl then Some (decl_pos, targets) + else None)) + ~merge:PosSet.union () + in + + (* Combine value and type refs into (value_targets, type_targets) pairs. 
+ Use join to combine, with decls as the base to ensure all decls are present. *) + let with_value_refs : (Lexing.position, PosSet.t) Reactive.t = + Reactive.join ~name:"decl_refs.with_value_refs" decls value_decl_refs + ~key_of:(fun pos _decl -> pos) + ~f:(fun pos _decl refs_opt -> + [(pos, Option.value refs_opt ~default:PosSet.empty)]) + () + in + + let with_type_refs : (Lexing.position, PosSet.t) Reactive.t = + Reactive.join ~name:"decl_refs.with_type_refs" decls type_decl_refs + ~key_of:(fun pos _decl -> pos) + ~f:(fun pos _decl refs_opt -> + [(pos, Option.value refs_opt ~default:PosSet.empty)]) + () + in + + (* Combine into final (value_targets, type_targets) pairs *) + Reactive.join ~name:"decl_refs.combined" with_value_refs with_type_refs + ~key_of:(fun pos _value_targets -> pos) + ~f:(fun pos value_targets type_targets_opt -> + let type_targets = Option.value type_targets_opt ~default:PosSet.empty in + [(pos, (value_targets, type_targets))]) + () diff --git a/analysis/reanalyze/src/ReactiveDeclRefs.mli b/analysis/reanalyze/src/ReactiveDeclRefs.mli new file mode 100644 index 0000000000..e11f6510b6 --- /dev/null +++ b/analysis/reanalyze/src/ReactiveDeclRefs.mli @@ -0,0 +1,17 @@ +(** Reactive mapping from declarations to their outgoing references. + + This is the reactive version of [Liveness.build_decl_refs_index]. + Updates incrementally when refs or declarations change. + + Next step: combine with a reactive fixpoint combinator for fully + incremental liveness computation. *) + +val create : + decls:(Lexing.position, Decl.t) Reactive.t -> + value_refs_from:(Lexing.position, PosSet.t) Reactive.t -> + type_refs_from:(Lexing.position, PosSet.t) Reactive.t -> + (Lexing.position, PosSet.t * PosSet.t) Reactive.t +(** [create ~decls ~value_refs_from ~type_refs_from] creates a reactive index + mapping each declaration position to its outgoing references. + + Returns [(value_targets, type_targets)] for each declaration. *) diff --git a/analysis/reanalyze/src/ReactiveExceptionRefs.ml b/analysis/reanalyze/src/ReactiveExceptionRefs.ml new file mode 100644 index 0000000000..81e23bfbe6 --- /dev/null +++ b/analysis/reanalyze/src/ReactiveExceptionRefs.ml @@ -0,0 +1,94 @@ +(** Reactive exception reference resolution. + + Expresses exception ref resolution as a reactive join: + - exception_refs: (path, loc_from) from CrossFileItems + - exception_decls: (path, loc_to) indexed from Declarations + - result: value refs (pos_to, pos_from) + + When declarations or exception_refs change, only affected refs update. *) + +(** {1 Types} *) + +type t = { + exception_decls: (DcePath.t, Location.t) Reactive.t; + resolved_refs: (Lexing.position, PosSet.t) Reactive.t; + resolved_refs_from: (Lexing.position, PosSet.t) Reactive.t; +} +(** Reactive exception ref collections *) + +(** {1 Creation} *) + +(** Create reactive exception refs from decls and cross-file exception refs. + + [decls] is the reactive declarations collection. + [exception_refs] is the reactive collection of (path, loc_from) from CrossFileItems. 
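+
+    The result exposes both directions: [resolved_refs] maps each target
+    position to its set of source positions, and [resolved_refs_from] is the
+    inverted source -> targets view used by the forward solver.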
*) +let create ~(decls : (Lexing.position, Decl.t) Reactive.t) + ~(exception_refs : (DcePath.t, Location.t) Reactive.t) : t = + (* Step 1: Index exception declarations by path *) + let exception_decls = + Reactive.flatMap ~name:"exc_refs.exception_decls" decls + ~f:(fun _pos (decl : Decl.t) -> + match decl.Decl.declKind with + | Exception -> + let loc : Location.t = + { + Location.loc_start = decl.pos; + loc_end = decl.posEnd; + loc_ghost = false; + } + in + [(decl.path, loc)] + | _ -> []) + () (* Last-write-wins is fine since paths should be unique *) + in + + (* Step 2: Join exception_refs with exception_decls *) + let resolved_refs = + Reactive.join ~name:"exc_refs.resolved_refs" exception_refs exception_decls + ~key_of:(fun path _loc_from -> path) + ~f:(fun _path loc_from loc_to_opt -> + match loc_to_opt with + | Some loc_to -> + (* Add value reference: pos_to -> pos_from (refs_to direction) *) + [ + ( loc_to.Location.loc_start, + PosSet.singleton loc_from.Location.loc_start ); + ] + | None -> []) + ~merge:PosSet.union () + in + + (* Step 3: Create refs_from direction by inverting *) + let resolved_refs_from = + Reactive.flatMap ~name:"exc_refs.resolved_refs_from" resolved_refs + ~f:(fun posTo posFromSet -> + PosSet.elements posFromSet + |> List.map (fun posFrom -> (posFrom, PosSet.singleton posTo))) + ~merge:PosSet.union () + in + + {exception_decls; resolved_refs; resolved_refs_from} + +(** {1 Freezing} *) + +(** Add all resolved exception refs to a References.builder *) +let add_to_refs_builder (t : t) ~(refs : References.builder) : unit = + Reactive.iter + (fun posTo posFromSet -> + PosSet.iter + (fun posFrom -> References.add_value_ref refs ~posTo ~posFrom) + posFromSet) + t.resolved_refs + +(** Add file dependencies for resolved refs *) +let add_to_file_deps_builder (t : t) ~(file_deps : FileDeps.builder) : unit = + Reactive.iter + (fun posTo posFromSet -> + PosSet.iter + (fun posFrom -> + let from_file = posFrom.Lexing.pos_fname in + let to_file = posTo.Lexing.pos_fname in + if from_file <> to_file then + FileDeps.add_dep file_deps ~from_file ~to_file) + posFromSet) + t.resolved_refs diff --git a/analysis/reanalyze/src/ReactiveExceptionRefs.mli b/analysis/reanalyze/src/ReactiveExceptionRefs.mli new file mode 100644 index 0000000000..8f918d7cfe --- /dev/null +++ b/analysis/reanalyze/src/ReactiveExceptionRefs.mli @@ -0,0 +1,59 @@ +(** Reactive exception reference resolution. + + Expresses exception ref resolution as a reactive join. + When declarations or exception_refs change, only affected refs update. 
+ + {2 Pipeline} + + {[ + decls exception_refs + | | + | flatMap | + ↓ | + exception_decls | + (path → loc) | + ↘ ↙ + join + ↓ + resolved_refs + (pos → PosSet) + ]} + + {2 Example} + + {[ + let exc_refs = ReactiveExceptionRefs.create + ~decls:merged.decls + ~exception_refs:(flatMap cross_file ~f:extract_exception_refs ()) + in + ReactiveExceptionRefs.add_to_refs_builder exc_refs ~refs:my_refs_builder + ]} *) + +(** {1 Types} *) + +type t = { + exception_decls: (DcePath.t, Location.t) Reactive.t; + resolved_refs: (Lexing.position, PosSet.t) Reactive.t; + (** refs_to direction: target -> sources *) + resolved_refs_from: (Lexing.position, PosSet.t) Reactive.t; + (** refs_from direction: source -> targets (for forward solver) *) +} +(** Reactive exception ref collections *) + +(** {1 Creation} *) + +val create : + decls:(Lexing.position, Decl.t) Reactive.t -> + exception_refs:(DcePath.t, Location.t) Reactive.t -> + t +(** Create reactive exception refs from decls and cross-file exception refs. + + When the source collections change, resolved refs automatically update. *) + +(** {1 Freezing} *) + +val add_to_refs_builder : t -> refs:References.builder -> unit +(** Add all resolved exception refs to a References.builder. *) + +val add_to_file_deps_builder : t -> file_deps:FileDeps.builder -> unit +(** Add file dependencies for resolved refs. *) diff --git a/analysis/reanalyze/src/ReactiveLiveness.ml b/analysis/reanalyze/src/ReactiveLiveness.ml new file mode 100644 index 0000000000..4322bd0992 --- /dev/null +++ b/analysis/reanalyze/src/ReactiveLiveness.ml @@ -0,0 +1,134 @@ +(** Reactive liveness computation using fixpoint. + + Computes the set of live declarations by: + 1. Starting from roots (annotated + externally referenced) + 2. Propagating through references via fixpoint + + Uses pure reactive combinators - no internal hashtables. *) + +type t = { + live: (Lexing.position, unit) Reactive.t; + edges: (Lexing.position, Lexing.position list) Reactive.t; + roots: (Lexing.position, unit) Reactive.t; +} + +(** Compute reactive liveness from ReactiveMerge.t *) +let create ~(merged : ReactiveMerge.t) : t = + let decls = merged.decls in + let annotations = merged.annotations in + + (* Combine value refs using union: per-file refs + exception refs *) + let value_refs_from : (Lexing.position, PosSet.t) Reactive.t = + Reactive.union ~name:"liveness.value_refs_from" merged.value_refs_from + merged.exception_refs.resolved_refs_from ~merge:PosSet.union () + in + + (* Combine type refs using union: per-file refs + type deps from ReactiveTypeDeps *) + let type_refs_from : (Lexing.position, PosSet.t) Reactive.t = + Reactive.union ~name:"liveness.type_refs_from" merged.type_refs_from + merged.type_deps.all_type_refs_from ~merge:PosSet.union () + in + + (* Step 1: Build decl_refs_index - maps decl -> (value_targets, type_targets) *) + let decl_refs_index = + ReactiveDeclRefs.create ~decls ~value_refs_from ~type_refs_from + in + + (* Step 2: Convert to edges format for fixpoint: decl -> successor list *) + let edges : (Lexing.position, Lexing.position list) Reactive.t = + Reactive.flatMap ~name:"liveness.edges" decl_refs_index + ~f:(fun pos (value_targets, type_targets) -> + let all_targets = PosSet.union value_targets type_targets in + [(pos, PosSet.elements all_targets)]) + () + in + + (* Step 3: Compute roots - positions that are inherently live *) + (* Root if: annotated @live/@genType OR referenced from outside any decl *) + + (* Compute externally referenced positions reactively. 
+ A position is externally referenced if any reference to it comes from + a position that is NOT a declaration position (exact match). + + This matches the non-reactive algorithm which uses DeclarationStore.find_opt. + + We use join to explicitly track the dependency on decls. When a decl at + position P arrives, any ref with posFrom=P will be reprocessed. *) + let external_value_refs : (Lexing.position, unit) Reactive.t = + Reactive.join ~name:"liveness.external_value_refs" value_refs_from decls + ~key_of:(fun posFrom _targets -> posFrom) + ~f:(fun _posFrom targets decl_opt -> + match decl_opt with + | Some _ -> + (* posFrom IS a decl position, refs are internal *) + [] + | None -> + (* posFrom is NOT a decl position, targets are externally referenced *) + PosSet.elements targets |> List.map (fun posTo -> (posTo, ()))) + ~merge:(fun () () -> ()) + () + in + + let external_type_refs : (Lexing.position, unit) Reactive.t = + Reactive.join ~name:"liveness.external_type_refs" type_refs_from decls + ~key_of:(fun posFrom _targets -> posFrom) + ~f:(fun _posFrom targets decl_opt -> + match decl_opt with + | Some _ -> + (* posFrom IS a decl position, refs are internal *) + [] + | None -> + (* posFrom is NOT a decl position, targets are externally referenced *) + PosSet.elements targets |> List.map (fun posTo -> (posTo, ()))) + ~merge:(fun () () -> ()) + () + in + + let externally_referenced : (Lexing.position, unit) Reactive.t = + Reactive.union ~name:"liveness.externally_referenced" external_value_refs + external_type_refs + ~merge:(fun () () -> ()) + () + in + + (* Compute annotated roots: decls with @live or @genType *) + let annotated_roots : (Lexing.position, unit) Reactive.t = + Reactive.join ~name:"liveness.annotated_roots" decls annotations + ~key_of:(fun pos _decl -> pos) + ~f:(fun pos _decl ann_opt -> + match ann_opt with + | Some FileAnnotations.Live | Some FileAnnotations.GenType -> + [(pos, ())] + | _ -> []) + ~merge:(fun () () -> ()) + () + in + + (* Combine all roots *) + let all_roots : (Lexing.position, unit) Reactive.t = + Reactive.union ~name:"liveness.all_roots" annotated_roots + externally_referenced + ~merge:(fun () () -> ()) + () + in + + (* Step 4: Compute fixpoint - all reachable positions from roots *) + let live = + Reactive.fixpoint ~name:"liveness.live" ~init:all_roots ~edges () + in + {live; edges; roots = all_roots} + +(** Print reactive collection update statistics *) +let print_stats ~(t : t) : unit = + let print name (c : _ Reactive.t) = + let s = Reactive.stats c in + Printf.eprintf + " %s: recv=%d/%d +%d -%d | emit=%d/%d +%d -%d | runs=%d len=%d\n" name + s.deltas_received s.entries_received s.adds_received s.removes_received + s.deltas_emitted s.entries_emitted s.adds_emitted s.removes_emitted + s.process_count (Reactive.length c) + in + Printf.eprintf "ReactiveLiveness stats (recv=d/e/+/- emit=d/e/+/- runs):\n"; + print "roots" t.roots; + print "edges" t.edges; + print "live (fixpoint)" t.live diff --git a/analysis/reanalyze/src/ReactiveLiveness.mli b/analysis/reanalyze/src/ReactiveLiveness.mli new file mode 100644 index 0000000000..e0b5fcf53a --- /dev/null +++ b/analysis/reanalyze/src/ReactiveLiveness.mli @@ -0,0 +1,22 @@ +(** Reactive liveness computation using fixpoint. + + Computes the set of live declarations incrementally. 
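+
+    A minimal usage sketch ([file_data] is a hypothetical binding for the
+    per-file data collection passed to [ReactiveMerge.create]):
+
+    {[
+      let merged = ReactiveMerge.create file_data in
+      let liveness = ReactiveLiveness.create ~merged in
+      let is_live pos = Reactive.get liveness.live pos <> None in
+      ignore is_live
+    ]}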
*) + +type t = { + live: (Lexing.position, unit) Reactive.t; + edges: (Lexing.position, Lexing.position list) Reactive.t; + roots: (Lexing.position, unit) Reactive.t; +} + +val create : merged:ReactiveMerge.t -> t +(** [create ~merged] computes reactive liveness from merged DCE data. + + Returns a record containing: + - live: positions that are live (via fixpoint) + - edges: declaration → referenced positions + - roots: initial live positions (annotated + externally referenced) + + Updates automatically when any input changes. *) + +val print_stats : t:t -> unit +(** Print update statistics for liveness collections (roots, edges, live fixpoint) *) diff --git a/analysis/reanalyze/src/ReactiveMerge.ml b/analysis/reanalyze/src/ReactiveMerge.ml new file mode 100644 index 0000000000..f0a340f6c1 --- /dev/null +++ b/analysis/reanalyze/src/ReactiveMerge.ml @@ -0,0 +1,270 @@ +(** Reactive merge of per-file DCE data into global collections. + + Given a reactive collection of (path, file_data), this creates derived + reactive collections that automatically update when source files change. *) + +(** {1 Types} *) + +type t = { + decls: (Lexing.position, Decl.t) Reactive.t; + annotations: (Lexing.position, FileAnnotations.annotated_as) Reactive.t; + value_refs_from: (Lexing.position, PosSet.t) Reactive.t; + type_refs_from: (Lexing.position, PosSet.t) Reactive.t; + cross_file_items: (string, CrossFileItems.t) Reactive.t; + file_deps_map: (string, FileSet.t) Reactive.t; + files: (string, unit) Reactive.t; + (* Reactive type/exception dependencies *) + type_deps: ReactiveTypeDeps.t; + exception_refs: ReactiveExceptionRefs.t; +} +(** All derived reactive collections from per-file data *) + +(** {1 Creation} *) + +let create (source : (string, DceFileProcessing.file_data option) Reactive.t) : + t = + (* Declarations: (pos, Decl.t) with last-write-wins *) + let decls = + Reactive.flatMap ~name:"decls" source + ~f:(fun _path file_data_opt -> + match file_data_opt with + | None -> [] + | Some file_data -> + Declarations.builder_to_list file_data.DceFileProcessing.decls) + () + in + + (* Annotations: (pos, annotated_as) with last-write-wins *) + let annotations = + Reactive.flatMap ~name:"annotations" source + ~f:(fun _path file_data_opt -> + match file_data_opt with + | None -> [] + | Some file_data -> + FileAnnotations.builder_to_list + file_data.DceFileProcessing.annotations) + () + in + + (* Value refs_from: (posFrom, PosSet of targets) with PosSet.union merge *) + let value_refs_from = + Reactive.flatMap ~name:"value_refs_from" source + ~f:(fun _path file_data_opt -> + match file_data_opt with + | None -> [] + | Some file_data -> + References.builder_value_refs_from_list + file_data.DceFileProcessing.refs) + ~merge:PosSet.union () + in + + (* Type refs_from: (posFrom, PosSet of targets) with PosSet.union merge *) + let type_refs_from = + Reactive.flatMap ~name:"type_refs_from" source + ~f:(fun _path file_data_opt -> + match file_data_opt with + | None -> [] + | Some file_data -> + References.builder_type_refs_from_list + file_data.DceFileProcessing.refs) + ~merge:PosSet.union () + in + + (* Cross-file items: (path, CrossFileItems.t) with merge by concatenation *) + let cross_file_items = + Reactive.flatMap ~name:"cross_file_items" source + ~f:(fun path file_data_opt -> + match file_data_opt with + | None -> [] + | Some file_data -> + let items = + CrossFileItems.builder_to_t file_data.DceFileProcessing.cross_file + in + [(path, items)]) + ~merge:(fun a b -> + CrossFileItems. 
+ { + exception_refs = a.exception_refs @ b.exception_refs; + optional_arg_calls = a.optional_arg_calls @ b.optional_arg_calls; + function_refs = a.function_refs @ b.function_refs; + }) + () + in + + (* File deps map: (from_file, FileSet of to_files) with FileSet.union merge *) + let file_deps_map = + Reactive.flatMap ~name:"file_deps_map" source + ~f:(fun _path file_data_opt -> + match file_data_opt with + | None -> [] + | Some file_data -> + FileDeps.builder_deps_to_list file_data.DceFileProcessing.file_deps) + ~merge:FileSet.union () + in + + (* Files set: (source_path, ()) - just track which source files exist *) + let files = + Reactive.flatMap ~name:"files" source + ~f:(fun _cmt_path file_data_opt -> + match file_data_opt with + | None -> [] + | Some file_data -> + (* Include all source files from file_deps (NOT the CMT path) *) + let file_set = + FileDeps.builder_files file_data.DceFileProcessing.file_deps + in + FileSet.fold (fun f acc -> (f, ()) :: acc) file_set []) + () + in + + (* Extract exception_refs from cross_file_items for ReactiveExceptionRefs *) + let exception_refs_collection = + Reactive.flatMap ~name:"exception_refs_collection" cross_file_items + ~f:(fun _path items -> + items.CrossFileItems.exception_refs + |> List.map (fun (r : CrossFileItems.exception_ref) -> + (r.exception_path, r.loc_from))) + () + in + + (* Create reactive type-label dependencies *) + let type_deps = + ReactiveTypeDeps.create ~decls + ~report_types_dead_only_in_interface: + DeadCommon.Config.reportTypesDeadOnlyInInterface + in + + (* Create reactive exception refs resolution *) + let exception_refs = + ReactiveExceptionRefs.create ~decls + ~exception_refs:exception_refs_collection + in + + { + decls; + annotations; + value_refs_from; + type_refs_from; + cross_file_items; + file_deps_map; + files; + type_deps; + exception_refs; + } + +(** {1 Conversion to solver-ready format} *) + +(** Convert reactive decls to Declarations.t for solver *) +let freeze_decls (t : t) : Declarations.t = + let result = PosHash.create 256 in + Reactive.iter (fun pos decl -> PosHash.replace result pos decl) t.decls; + Declarations.create_from_hashtbl result + +(** Convert reactive annotations to FileAnnotations.t for solver *) +let freeze_annotations (t : t) : FileAnnotations.t = + let result = PosHash.create 256 in + Reactive.iter (fun pos ann -> PosHash.replace result pos ann) t.annotations; + FileAnnotations.create_from_hashtbl result + +(** Convert reactive refs to References.t for solver. + Includes type-label deps and exception refs from reactive computations. 
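+
+    This is a full snapshot: it walks every reference once, so it costs
+    O(total refs) per call; the fully reactive path (ReactiveSolver) consumes
+    the reactive collections directly instead.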
*) +let freeze_refs (t : t) : References.t = + let value_refs_from = PosHash.create 256 in + let type_refs_from = PosHash.create 256 in + + (* Helper to add to refs_from hashtable *) + let add_to_from tbl posFrom posTo = + let existing = + match PosHash.find_opt tbl posFrom with + | Some s -> s + | None -> PosSet.empty + in + PosHash.replace tbl posFrom (PosSet.add posTo existing) + in + + (* Merge per-file value refs_from *) + Reactive.iter + (fun posFrom posToSet -> + PosSet.iter + (fun posTo -> add_to_from value_refs_from posFrom posTo) + posToSet) + t.value_refs_from; + + (* Merge per-file type refs_from *) + Reactive.iter + (fun posFrom posToSet -> + PosSet.iter + (fun posTo -> add_to_from type_refs_from posFrom posTo) + posToSet) + t.type_refs_from; + + (* Add type-label dependency refs from all sources *) + let add_type_refs_from reactive = + Reactive.iter + (fun posFrom posToSet -> + PosSet.iter + (fun posTo -> add_to_from type_refs_from posFrom posTo) + posToSet) + reactive + in + add_type_refs_from t.type_deps.all_type_refs_from; + + (* Add exception refs (to value refs_from) *) + Reactive.iter + (fun posFrom posToSet -> + PosSet.iter + (fun posTo -> add_to_from value_refs_from posFrom posTo) + posToSet) + t.exception_refs.resolved_refs_from; + + References.create ~value_refs_from ~type_refs_from + +(** Collect all cross-file items *) +let collect_cross_file_items (t : t) : CrossFileItems.t = + let exception_refs = ref [] in + let optional_arg_calls = ref [] in + let function_refs = ref [] in + Reactive.iter + (fun _path items -> + exception_refs := items.CrossFileItems.exception_refs @ !exception_refs; + optional_arg_calls := + items.CrossFileItems.optional_arg_calls @ !optional_arg_calls; + function_refs := items.CrossFileItems.function_refs @ !function_refs) + t.cross_file_items; + { + CrossFileItems.exception_refs = !exception_refs; + optional_arg_calls = !optional_arg_calls; + function_refs = !function_refs; + } + +(** Convert reactive file deps to FileDeps.t for solver. + Includes file deps from exception refs. *) +let freeze_file_deps (t : t) : FileDeps.t = + let files = + let result = ref FileSet.empty in + Reactive.iter (fun path () -> result := FileSet.add path !result) t.files; + !result + in + let deps = FileDeps.FileHash.create 256 in + Reactive.iter + (fun from_file to_files -> + FileDeps.FileHash.replace deps from_file to_files) + t.file_deps_map; + (* Add file deps from exception refs - iterate value_refs_from *) + Reactive.iter + (fun posFrom posToSet -> + PosSet.iter + (fun posTo -> + let from_file = posFrom.Lexing.pos_fname in + let to_file = posTo.Lexing.pos_fname in + if from_file <> to_file then + let existing = + match FileDeps.FileHash.find_opt deps from_file with + | Some s -> s + | None -> FileSet.empty + in + FileDeps.FileHash.replace deps from_file + (FileSet.add to_file existing)) + posToSet) + t.exception_refs.resolved_refs_from; + FileDeps.create ~files ~deps diff --git a/analysis/reanalyze/src/ReactiveMerge.mli b/analysis/reanalyze/src/ReactiveMerge.mli new file mode 100644 index 0000000000..181c37a695 --- /dev/null +++ b/analysis/reanalyze/src/ReactiveMerge.mli @@ -0,0 +1,64 @@ +(** Reactive merge of per-file DCE data into global collections. + + Given a reactive collection of (path, file_data), this creates derived + reactive collections that automatically update when source files change. 
+ + {2 Example} + + {[ + (* Create reactive file collection *) + let files = ReactiveAnalysis.create ~config in + + (* Process files *) + ReactiveAnalysis.process_files ~collection:files ~config paths; + + (* Create reactive merge from processed file data *) + let merged = ReactiveMerge.create (ReactiveAnalysis.to_collection files) in + + (* Access derived collections *) + Reactive.iter (fun pos decl -> ...) merged.decls; + + (* Or freeze for solver *) + let decls = ReactiveMerge.freeze_decls merged in + ]} *) + +(** {1 Types} *) + +type t = { + decls: (Lexing.position, Decl.t) Reactive.t; + annotations: (Lexing.position, FileAnnotations.annotated_as) Reactive.t; + value_refs_from: (Lexing.position, PosSet.t) Reactive.t; + (** Value refs: source -> targets *) + type_refs_from: (Lexing.position, PosSet.t) Reactive.t; + (** Type refs: source -> targets *) + cross_file_items: (string, CrossFileItems.t) Reactive.t; + file_deps_map: (string, FileSet.t) Reactive.t; + files: (string, unit) Reactive.t; + (* Reactive type/exception dependencies *) + type_deps: ReactiveTypeDeps.t; + exception_refs: ReactiveExceptionRefs.t; +} +(** All derived reactive collections from per-file data *) + +(** {1 Creation} *) + +val create : (string, DceFileProcessing.file_data option) Reactive.t -> t +(** Create reactive merge from a file data collection. + All derived collections update automatically when source changes. *) + +(** {1 Conversion to solver-ready format} *) + +val freeze_decls : t -> Declarations.t +(** Convert reactive decls to Declarations.t for solver *) + +val freeze_annotations : t -> FileAnnotations.t +(** Convert reactive annotations to FileAnnotations.t for solver *) + +val freeze_refs : t -> References.t +(** Convert reactive refs to References.t for solver *) + +val collect_cross_file_items : t -> CrossFileItems.t +(** Collect all cross-file items *) + +val freeze_file_deps : t -> FileDeps.t +(** Convert reactive file deps to FileDeps.t for solver *) diff --git a/analysis/reanalyze/src/ReactiveSolver.ml b/analysis/reanalyze/src/ReactiveSolver.ml new file mode 100644 index 0000000000..009d64d3fd --- /dev/null +++ b/analysis/reanalyze/src/ReactiveSolver.ml @@ -0,0 +1,348 @@ +(** Reactive dead code solver. 
+ + Reactive pipeline: decls + live + annotations → dead_decls, live_decls, dead_modules, + dead_decls_by_file, issues_by_file, incorrect_dead_decls, dead_module_issues + + Current status: + - All collections are reactive (zero recomputation on cache hit for unchanged files) + - dead_decls, live_decls = decls partitioned by liveness (reactive join) + - dead_modules = modules with dead decls but no live decls (reactive anti-join) + - dead_decls_by_file = dead decls grouped by file (reactive flatMap with merge) + - value_refs_from_by_file = refs grouped by source file (reactive flatMap with merge) + - issues_by_file = per-file issue generation (reactive flatMap) + - incorrect_dead_decls = live decls with @dead annotation (reactive join) + - dead_module_issues = dead_modules joined with modules_with_reported (reactive join) + - is_pos_live uses reactive live collection (no resolvedDead mutation) + - shouldReport callback replaces report field mutation (no mutation needed) + - isInsideReportedValue is per-file only, so files are independent + - hasRefBelow uses on-demand search: O(total_refs) per dead decl (cross-file refs count as "below") + + All issues now match between reactive and non-reactive modes (380 on deadcode test): + - Dead code issues: 362 (Exception:2, Module:31, Type:87, Value:233, ValueWithSideEffects:8) + - Incorrect @dead: 1 + - Optional args: 18 (Redundant:6, Unused:12) *) + +type t = { + decls: (Lexing.position, Decl.t) Reactive.t; + live: (Lexing.position, unit) Reactive.t; + dead_decls: (Lexing.position, Decl.t) Reactive.t; + live_decls: (Lexing.position, Decl.t) Reactive.t; + annotations: (Lexing.position, FileAnnotations.annotated_as) Reactive.t; + value_refs_from: (Lexing.position, PosSet.t) Reactive.t option; + dead_modules: (Name.t, Location.t * string) Reactive.t; + (** Modules where all declarations are dead. Value is (loc, fileName). Reactive anti-join. *) + dead_decls_by_file: (string, Decl.t list) Reactive.t; + (** Dead declarations grouped by file. Reactive per-file grouping. *) + issues_by_file: (string, Issue.t list * Name.t list) Reactive.t; + (** Dead code issues grouped by file. Reactive per-file issue generation. + First component: value/type/exception issues. + Second component: modules with at least one reported value (for module issue generation). *) + incorrect_dead_decls: (Lexing.position, Decl.t) Reactive.t; + (** Live declarations with @dead annotation. Reactive join of live_decls + annotations. *) + dead_module_issues: (Name.t, Issue.t) Reactive.t; + (** Dead module issues. Reactive join of dead_modules + modules_with_reported. 
*) + config: DceConfig.t; +} + +(** Extract module name from a declaration *) +let decl_module_name (decl : Decl.t) : Name.t = + decl.path |> DcePath.toModuleName ~isType:(decl.declKind |> Decl.Kind.isType) + +let create ~(decls : (Lexing.position, Decl.t) Reactive.t) + ~(live : (Lexing.position, unit) Reactive.t) + ~(annotations : (Lexing.position, FileAnnotations.annotated_as) Reactive.t) + ~(value_refs_from : (Lexing.position, PosSet.t) Reactive.t option) + ~(config : DceConfig.t) : t = + (* dead_decls = decls where NOT in live (reactive join) *) + let dead_decls = + Reactive.join ~name:"solver.dead_decls" decls live + ~key_of:(fun pos _decl -> pos) + ~f:(fun pos decl live_opt -> + match live_opt with + | None -> [(pos, decl)] + | Some () -> []) + () + in + + (* live_decls = decls where in live (reactive join) *) + let live_decls = + Reactive.join ~name:"solver.live_decls" decls live + ~key_of:(fun pos _decl -> pos) + ~f:(fun pos decl live_opt -> + match live_opt with + | Some () -> [(pos, decl)] + | None -> []) + () + in + + (* Reactive dead modules: modules with dead decls but no live decls *) + let dead_modules = + if not config.DceConfig.run.transitive then + (* Dead modules only reported in transitive mode *) + Reactive.flatMap ~name:"solver.dead_modules_empty" dead_decls + ~f:(fun _ _ -> []) + () + else + (* modules_with_dead: (moduleName, (loc, fileName)) for each module with dead decls *) + let modules_with_dead = + Reactive.flatMap ~name:"solver.modules_with_dead" dead_decls + ~f:(fun _pos decl -> + [ + ( decl_module_name decl, + (decl.moduleLoc, decl.pos.Lexing.pos_fname) ); + ]) + ~merge:(fun v1 _v2 -> v1) (* keep first *) + () + in + (* modules_with_live: (moduleName, ()) for each module with live decls *) + let modules_with_live = + Reactive.flatMap ~name:"solver.modules_with_live" live_decls + ~f:(fun _pos decl -> [(decl_module_name decl, ())]) + () + in + (* Anti-join: modules in dead but not in live *) + Reactive.join ~name:"solver.dead_modules" modules_with_dead + modules_with_live + ~key_of:(fun modName (_loc, _fileName) -> modName) + ~f:(fun modName (loc, fileName) live_opt -> + match live_opt with + | None -> [(modName, (loc, fileName))] (* dead: no live decls *) + | Some () -> []) (* live: has at least one live decl *) + () + in + + (* Reactive per-file grouping of dead declarations *) + let dead_decls_by_file = + Reactive.flatMap ~name:"solver.dead_decls_by_file" dead_decls + ~f:(fun _pos decl -> [(decl.pos.Lexing.pos_fname, [decl])]) + ~merge:(fun decls1 decls2 -> decls1 @ decls2) + () + in + + let transitive = config.DceConfig.run.transitive in + + (* Reactive per-file issues - recomputed when dead_decls_by_file changes. + Returns (file, (value_issues, modules_with_reported_values)) where + modules_with_reported_values are modules that have at least one reported dead value. + Module issues are generated separately in collect_issues using dead_modules. 
*) + let issues_by_file = + Reactive.flatMap ~name:"solver.issues_by_file" dead_decls_by_file + ~f:(fun file decls -> + (* Track modules that have reported values *) + let modules_with_values : (Name.t, unit) Hashtbl.t = Hashtbl.create 8 in + (* shouldReport checks annotations reactively *) + let shouldReport (decl : Decl.t) = + match Reactive.get annotations decl.pos with + | Some FileAnnotations.Live -> false + | Some FileAnnotations.GenType -> false + | Some FileAnnotations.Dead -> false + | None -> true + in + (* Don't emit module issues here - track modules for later *) + let checkModuleDead ~fileName:_ moduleName = + Hashtbl.replace modules_with_values moduleName (); + None (* Module issues generated separately *) + in + (* hasRefBelow: check if decl has any ref from "below" (including cross-file refs) *) + let hasRefBelow = + if transitive then fun _ -> false + else + match value_refs_from with + | None -> fun _ -> false + | Some refs_from -> + (* Must iterate ALL refs since cross-file refs also count as "below" *) + DeadCommon.make_hasRefBelow ~transitive + ~iter_value_refs_from:(fun f -> Reactive.iter f refs_from) + in + (* Sort within file and generate issues *) + let sorted = decls |> List.fast_sort Decl.compareForReporting in + let reporting_ctx = DeadCommon.ReportingContext.create () in + let file_issues = + sorted + |> List.concat_map (fun decl -> + DeadCommon.reportDeclaration ~config ~hasRefBelow + ~checkModuleDead ~shouldReport reporting_ctx decl) + in + let modules_list = + Hashtbl.fold (fun m () acc -> m :: acc) modules_with_values [] + in + [(file, (file_issues, modules_list))]) + () + in + + (* Reactive incorrect @dead: live decls with @dead annotation *) + let incorrect_dead_decls = + Reactive.join ~name:"solver.incorrect_dead_decls" live_decls annotations + ~key_of:(fun pos _decl -> pos) + ~f:(fun pos decl ann_opt -> + match ann_opt with + | Some FileAnnotations.Dead -> [(pos, decl)] + | _ -> []) + () + in + + (* Reactive modules_with_reported: modules that have at least one reported dead value *) + let modules_with_reported = + Reactive.flatMap ~name:"solver.modules_with_reported" issues_by_file + ~f:(fun _file (_issues, modules_list) -> + List.map (fun m -> (m, ())) modules_list) + () + in + + (* Reactive dead module issues: dead_modules joined with modules_with_reported *) + let dead_module_issues = + Reactive.join ~name:"solver.dead_module_issues" dead_modules + modules_with_reported + ~key_of:(fun moduleName (_loc, _fileName) -> moduleName) + ~f:(fun moduleName (loc, fileName) has_reported_opt -> + match has_reported_opt with + | Some () -> + let loc = + if loc.Location.loc_ghost then + let pos = + { + Lexing.pos_fname = fileName; + pos_lnum = 0; + pos_bol = 0; + pos_cnum = 0; + } + in + {Location.loc_start = pos; loc_end = pos; loc_ghost = false} + else loc + in + [(moduleName, AnalysisResult.make_dead_module_issue ~loc ~moduleName)] + | None -> []) + () + in + + { + decls; + live; + dead_decls; + live_decls; + annotations; + value_refs_from; + dead_modules; + dead_decls_by_file; + issues_by_file; + incorrect_dead_decls; + dead_module_issues; + config; + } + +(** Check if a module is dead using reactive collection. Returns issue if dead. + Uses reported_modules set to avoid duplicate reports. 
*) +let check_module_dead ~(dead_modules : (Name.t, Location.t * string) Reactive.t) + ~(reported_modules : (Name.t, unit) Hashtbl.t) ~fileName:pos_fname + moduleName : Issue.t option = + if Hashtbl.mem reported_modules moduleName then None + else + match Reactive.get dead_modules moduleName with + | Some (loc, fileName) -> + Hashtbl.replace reported_modules moduleName (); + let loc = + if loc.Location.loc_ghost then + (* Use fileName from dead_modules, fallback to pos_fname *) + let fname = if fileName <> "" then fileName else pos_fname in + let pos = + {Lexing.pos_fname = fname; pos_lnum = 0; pos_bol = 0; pos_cnum = 0} + in + {Location.loc_start = pos; loc_end = pos; loc_ghost = false} + else loc + in + Some (AnalysisResult.make_dead_module_issue ~loc ~moduleName) + | None -> None + +(** Collect issues from reactive issues_by_file. + Only iterates the pre-computed reactive issues collection. + Deduplicates module issues across files. *) +let collect_issues ~(t : t) ~(config : DceConfig.t) + ~(ann_store : AnnotationStore.t) : Issue.t list = + ignore (config, ann_store); + (* config is stored in t, ann_store used via reactive annotations *) + let t0 = Unix.gettimeofday () in + (* Track reported modules to avoid duplicates across files *) + let reported_modules = Hashtbl.create 64 in + + (* Collect incorrect @dead issues from reactive collection *) + let incorrect_dead_issues = ref [] in + Reactive.iter + (fun _pos (decl : Decl.t) -> + let issue = + DeadCommon.makeDeadIssue ~decl + ~message:" is annotated @dead but is live" + Issue.IncorrectDeadAnnotation + in + (* Check if module is dead using reactive collection *) + check_module_dead ~dead_modules:t.dead_modules ~reported_modules + ~fileName:decl.pos.pos_fname (decl_module_name decl) + |> Option.iter (fun mod_issue -> + incorrect_dead_issues := mod_issue :: !incorrect_dead_issues); + incorrect_dead_issues := issue :: !incorrect_dead_issues) + t.incorrect_dead_decls; + let t1 = Unix.gettimeofday () in + + (* Collect issues from reactive issues_by_file *) + let num_files = ref 0 in + let dead_issues = ref [] in + Reactive.iter + (fun _file (file_issues, _modules_list) -> + incr num_files; + dead_issues := file_issues @ !dead_issues) + t.issues_by_file; + let t2 = Unix.gettimeofday () in + + (* Collect module issues from reactive dead_module_issues *) + let module_issues = ref [] in + Reactive.iter + (fun _moduleName issue -> module_issues := issue :: !module_issues) + t.dead_module_issues; + let t3 = Unix.gettimeofday () in + + if !Cli.timing then + Printf.eprintf + " collect_issues: incorrect_dead=%.2fms iter_issues=%.2fms \ + iter_modules=%.2fms (%d files)\n" + ((t1 -. t0) *. 1000.0) + ((t2 -. t1) *. 1000.0) + ((t3 -. t2) *. 1000.0) + !num_files; + + List.rev !incorrect_dead_issues @ !module_issues @ !dead_issues + +(** Iterate over live declarations *) +let iter_live_decls ~(t : t) (f : Decl.t -> unit) : unit = + Reactive.iter (fun _pos decl -> f decl) t.live_decls + +(** Check if a position is live using the reactive collection. + Returns true if pos is not a declaration (matches non-reactive behavior). 
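+    Concretely: [Reactive.get t.decls pos = None] yields [true];
+    a declaration with [Reactive.get t.live pos = Some ()] yields [true];
+    a declaration with no entry in [t.live] yields [false].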
*) +let is_pos_live ~(t : t) (pos : Lexing.position) : bool = + match Reactive.get t.decls pos with + | None -> true (* not a declaration, assume live *) + | Some _ -> Reactive.get t.live pos <> None + +(** Stats *) +let stats ~(t : t) : int * int = + (Reactive.length t.dead_decls, Reactive.length t.live_decls) + +(** Print reactive collection update statistics *) +let print_stats ~(t : t) : unit = + let print name (c : _ Reactive.t) = + let s = Reactive.stats c in + Printf.eprintf + " %s: recv=%d/%d +%d -%d | emit=%d/%d +%d -%d | runs=%d len=%d\n" name + s.deltas_received s.entries_received s.adds_received s.removes_received + s.deltas_emitted s.entries_emitted s.adds_emitted s.removes_emitted + s.process_count (Reactive.length c) + in + Printf.eprintf "ReactiveSolver stats (recv=d/e/+/- emit=d/e/+/- runs):\n"; + print "dead_decls" t.dead_decls; + print "live_decls" t.live_decls; + print "dead_modules" t.dead_modules; + print "dead_decls_by_file" t.dead_decls_by_file; + print "issues_by_file" t.issues_by_file; + print "incorrect_dead_decls" t.incorrect_dead_decls; + print "dead_module_issues" t.dead_module_issues; + match t.value_refs_from with + | Some refs -> print "value_refs_from" refs + | None -> () diff --git a/analysis/reanalyze/src/ReactiveSolver.mli b/analysis/reanalyze/src/ReactiveSolver.mli new file mode 100644 index 0000000000..0c5e5e1d0f --- /dev/null +++ b/analysis/reanalyze/src/ReactiveSolver.mli @@ -0,0 +1,33 @@ +(** Reactive dead code solver. + + Reactive pipeline: decls + live → dead_decls, live_decls + Issue generation uses DeadCommon.reportDeclaration for correct filtering. + + O(dead_decls + live_decls), not O(all_decls). *) + +type t + +val create : + decls:(Lexing.position, Decl.t) Reactive.t -> + live:(Lexing.position, unit) Reactive.t -> + annotations:(Lexing.position, FileAnnotations.annotated_as) Reactive.t -> + value_refs_from:(Lexing.position, PosSet.t) Reactive.t option -> + config:DceConfig.t -> + t + +val collect_issues : + t:t -> config:DceConfig.t -> ann_store:AnnotationStore.t -> Issue.t list +(** Collect issues. O(dead_decls + live_decls). *) + +val iter_live_decls : t:t -> (Decl.t -> unit) -> unit +(** Iterate over live declarations *) + +val is_pos_live : t:t -> Lexing.position -> bool +(** Check if a position is live using the reactive collection. + Returns true if pos is not a declaration (matches non-reactive behavior). *) + +val stats : t:t -> int * int +(** (dead, live) counts *) + +val print_stats : t:t -> unit +(** Print update statistics for all reactive collections *) diff --git a/analysis/reanalyze/src/ReactiveTypeDeps.ml b/analysis/reanalyze/src/ReactiveTypeDeps.ml new file mode 100644 index 0000000000..5fd0694405 --- /dev/null +++ b/analysis/reanalyze/src/ReactiveTypeDeps.ml @@ -0,0 +1,247 @@ +(** Reactive type-label dependencies. + + Expresses the type-label dependency computation as a reactive pipeline: + 1. decls -> decl_by_path (index by path) + 2. decl_by_path -> same_path_refs (connect duplicates at same path) + 3. decl_by_path + impl_decls -> cross_file_refs (connect impl<->intf) + + When declarations change, only affected refs are recomputed. 
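+    Illustrative example (hypothetical paths): a record label declared at
+    path [label; +Foo] is treated as an implementation decl (leading '+' on
+    the module tag), while [label; Foo] is an interface decl; steps 2-3 then
+    connect same-path duplicates and impl<->intf pairs with refs whose
+    direction depends on [report_types_dead_only_in_interface].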
*) + +(** {1 Helper types} *) + +type decl_info = { + pos: Lexing.position; + pos_end: Lexing.position; + path: DcePath.t; + is_interface: bool; +} +(** Simplified decl info for type-label processing *) + +let decl_to_info (decl : Decl.t) : decl_info option = + match decl.declKind with + | RecordLabel | VariantCase -> + let is_interface = + match List.rev decl.path with + | [] -> true + | moduleNameTag :: _ -> ( + try (moduleNameTag |> Name.toString).[0] <> '+' with _ -> true) + in + Some {pos = decl.pos; pos_end = decl.posEnd; path = decl.path; is_interface} + | _ -> None + +(** {1 Reactive Collections} *) + +type t = { + decl_by_path: (DcePath.t, decl_info list) Reactive.t; + (* refs_to direction: target -> sources *) + same_path_refs: (Lexing.position, PosSet.t) Reactive.t; + cross_file_refs: (Lexing.position, PosSet.t) Reactive.t; + all_type_refs: (Lexing.position, PosSet.t) Reactive.t; + impl_to_intf_refs_path2: (Lexing.position, PosSet.t) Reactive.t; + intf_to_impl_refs: (Lexing.position, PosSet.t) Reactive.t; + (* refs_from direction: source -> targets (for forward solver) *) + all_type_refs_from: (Lexing.position, PosSet.t) Reactive.t; +} +(** All reactive collections for type-label dependencies *) + +(** Create reactive type-label dependency collections from a decls collection *) +let create ~(decls : (Lexing.position, Decl.t) Reactive.t) + ~(report_types_dead_only_in_interface : bool) : t = + (* Step 1: Index decls by path *) + let decl_by_path = + Reactive.flatMap ~name:"type_deps.decl_by_path" decls + ~f:(fun _pos decl -> + match decl_to_info decl with + | Some info -> [(info.path, [info])] + | None -> []) + ~merge:List.append () + in + + (* Step 2: Same-path refs - connect all decls at the same path *) + let same_path_refs = + Reactive.flatMap ~name:"type_deps.same_path_refs" decl_by_path + ~f:(fun _path decls -> + match decls with + | [] | [_] -> [] + | first :: rest -> + (* Connect each decl to the first one (and vice-versa if needed). + Original: extendTypeDependencies loc loc0 adds posTo=loc, posFrom=loc0 + So: posTo=other, posFrom=first *) + rest + |> List.concat_map (fun other -> + (* Always add: other -> first (posTo=other, posFrom=first) *) + let refs = [(other.pos, PosSet.singleton first.pos)] in + if report_types_dead_only_in_interface then refs + else + (* Also add: first -> other (posTo=first, posFrom=other) *) + (first.pos, PosSet.singleton other.pos) :: refs)) + ~merge:PosSet.union () + in + + (* Step 3: Cross-file refs - connect impl decls to intf decls *) + (* First, extract impl decls that need to look up intf *) + let impl_decls = + Reactive.flatMap ~name:"type_deps.impl_decls" decls + ~f:(fun _pos decl -> + match decl_to_info decl with + | Some info when not info.is_interface -> ( + match info.path with + | [] -> [] + | typeLabelName :: pathToType -> + (* Try two intf paths *) + let path_1 = pathToType |> DcePath.moduleToInterface in + let path_2 = path_1 |> DcePath.typeToInterface in + let intf_path1 = typeLabelName :: path_1 in + let intf_path2 = typeLabelName :: path_2 in + [(info.pos, (info, intf_path1, intf_path2))]) + | _ -> []) + () + in + + (* Join impl decls with decl_by_path to find intf. 
+ Original: extendTypeDependencies loc loc1 where loc=impl, loc1=intf + adds posTo=impl, posFrom=intf *) + let impl_to_intf_refs = + Reactive.join ~name:"type_deps.impl_to_intf_refs" impl_decls decl_by_path + ~key_of:(fun _pos (_, intf_path1, _) -> intf_path1) + ~f:(fun _pos (info, _intf_path1, _intf_path2) intf_decls_opt -> + match intf_decls_opt with + | Some (intf_info :: _) -> + (* Found at path1: posTo=impl, posFrom=intf *) + let refs = [(info.pos, PosSet.singleton intf_info.pos)] in + if report_types_dead_only_in_interface then refs + else + (* Also: posTo=intf, posFrom=impl *) + (intf_info.pos, PosSet.singleton info.pos) :: refs + | _ -> []) + ~merge:PosSet.union () + in + + (* Second join for path2 fallback *) + let impl_needing_path2 = + Reactive.join ~name:"type_deps.impl_needing_path2" impl_decls decl_by_path + ~key_of:(fun _pos (_, intf_path1, _) -> intf_path1) + ~f:(fun pos (info, _intf_path1, intf_path2) intf_decls_opt -> + match intf_decls_opt with + | Some (_ :: _) -> [] (* Found at path1, skip *) + | _ -> [(pos, (info, intf_path2))]) + () + in + + let impl_to_intf_refs_path2 = + Reactive.join ~name:"type_deps.impl_to_intf_refs_path2" impl_needing_path2 + decl_by_path + ~key_of:(fun _pos (_, intf_path2) -> intf_path2) + ~f:(fun _pos (info, _) intf_decls_opt -> + match intf_decls_opt with + | Some (intf_info :: _) -> + (* posTo=impl, posFrom=intf *) + let refs = [(info.pos, PosSet.singleton intf_info.pos)] in + if report_types_dead_only_in_interface then refs + else (intf_info.pos, PosSet.singleton info.pos) :: refs + | _ -> []) + ~merge:PosSet.union () + in + + (* Also handle intf -> impl direction. + Original: extendTypeDependencies loc1 loc where loc=impl, loc1=intf + adds posTo=impl, posFrom=intf (note: same direction!) + The intf->impl code in original only runs when isInterface=true, + and the lookup is for finding the impl. *) + let intf_decls = + Reactive.flatMap ~name:"type_deps.intf_decls" decls + ~f:(fun _pos decl -> + match decl_to_info decl with + | Some info when info.is_interface -> ( + match info.path with + | [] -> [] + | typeLabelName :: pathToType -> + let impl_path = + typeLabelName :: DcePath.moduleToImplementation pathToType + in + [(info.pos, (info, impl_path))]) + | _ -> []) + () + in + + let intf_to_impl_refs = + Reactive.join ~name:"type_deps.intf_to_impl_refs" intf_decls decl_by_path + ~key_of:(fun _pos (_, impl_path) -> impl_path) + ~f:(fun _pos (intf_info, _) impl_decls_opt -> + match impl_decls_opt with + | Some (impl_info :: _) -> + (* Original: extendTypeDependencies loc1 loc where loc1=intf, loc=impl + But wait, looking at the original code more carefully: + + if isInterface then + match find_one path1 with + | None -> () + | Some loc1 -> + extendTypeDependencies ~config ~refs loc1 loc; + if not Config.reportTypesDeadOnlyInInterface then + extendTypeDependencies ~config ~refs loc loc1 + + Here loc is the current intf decl, loc1 is the found impl. + So extendTypeDependencies loc1 loc means posTo=loc1=impl, posFrom=loc=intf + *) + let refs = [(impl_info.pos, PosSet.singleton intf_info.pos)] in + if report_types_dead_only_in_interface then refs + else (intf_info.pos, PosSet.singleton impl_info.pos) :: refs + | _ -> []) + ~merge:PosSet.union () + in + + (* Cross-file refs are the combination of: + - impl_to_intf_refs (path1 matches) + - impl_to_intf_refs_path2 (path2 fallback) + - intf_to_impl_refs *) + let cross_file_refs = impl_to_intf_refs in + + (* All type refs = same_path_refs + all cross-file sources. 
+ We expose these separately and merge in freeze_refs. *) + let all_type_refs = same_path_refs in + + (* Create refs_from by combining and inverting all refs_to sources. + We use a single flatMap that iterates all sources once. *) + let all_type_refs_from = + (* Combine all refs_to sources using union *) + let combined_refs_to = + let u1 = + Reactive.union ~name:"type_deps.u1" same_path_refs cross_file_refs + ~merge:PosSet.union () + in + let u2 = + Reactive.union ~name:"type_deps.u2" u1 impl_to_intf_refs_path2 + ~merge:PosSet.union () + in + Reactive.union ~name:"type_deps.combined_refs_to" u2 intf_to_impl_refs + ~merge:PosSet.union () + in + (* Invert the combined refs_to to refs_from *) + Reactive.flatMap ~name:"type_deps.all_type_refs_from" combined_refs_to + ~f:(fun posTo posFromSet -> + PosSet.elements posFromSet + |> List.map (fun posFrom -> (posFrom, PosSet.singleton posTo))) + ~merge:PosSet.union () + in + + { + decl_by_path; + same_path_refs; + cross_file_refs; + all_type_refs; + impl_to_intf_refs_path2; + intf_to_impl_refs; + all_type_refs_from; + } + +(** {1 Freezing for solver} *) + +(** Add all type refs to a References.builder *) +let add_to_refs_builder (t : t) ~(refs : References.builder) : unit = + Reactive.iter + (fun posTo posFromSet -> + PosSet.iter + (fun posFrom -> References.add_type_ref refs ~posTo ~posFrom) + posFromSet) + t.all_type_refs diff --git a/analysis/reanalyze/src/ReactiveTypeDeps.mli b/analysis/reanalyze/src/ReactiveTypeDeps.mli new file mode 100644 index 0000000000..ac6c9ff2aa --- /dev/null +++ b/analysis/reanalyze/src/ReactiveTypeDeps.mli @@ -0,0 +1,72 @@ +(** Reactive type-label dependencies. + + Expresses the type-label dependency computation as a reactive pipeline. + When declarations change, only affected refs are recomputed. + + {2 Pipeline} + + {[ + decls + |> (flatMap) decl_by_path (* index by path *) + |> (flatMap) same_path_refs (* connect same-path duplicates *) + | + +-> (join) cross_file_refs (* connect impl <-> intf *) + | + +-> all_type_refs (* combined refs *) + ]} + + {2 Example} + + {[ + let reactive_decls = ReactiveMerge.create ... in + let type_deps = ReactiveTypeDeps.create + ~decls:reactive_decls.decls + ~report_types_dead_only_in_interface:true + in + (* Type refs update automatically when decls change *) + ReactiveTypeDeps.add_to_refs_builder type_deps ~refs:my_refs_builder + ]} *) + +(** {1 Types} *) + +type t = { + decl_by_path: (DcePath.t, decl_info list) Reactive.t; + (* refs_to direction: target -> sources *) + same_path_refs: (Lexing.position, PosSet.t) Reactive.t; + cross_file_refs: (Lexing.position, PosSet.t) Reactive.t; + all_type_refs: (Lexing.position, PosSet.t) Reactive.t; + impl_to_intf_refs_path2: (Lexing.position, PosSet.t) Reactive.t; + intf_to_impl_refs: (Lexing.position, PosSet.t) Reactive.t; + (* refs_from direction: source -> targets (for forward solver) *) + all_type_refs_from: (Lexing.position, PosSet.t) Reactive.t; +} +(** Reactive type-label dependency collections *) + +and decl_info = { + pos: Lexing.position; + pos_end: Lexing.position; + path: DcePath.t; + is_interface: bool; +} +(** Simplified decl info for type-label processing *) + +(** {1 Creation} *) + +val create : + decls:(Lexing.position, Decl.t) Reactive.t -> + report_types_dead_only_in_interface:bool -> + t +(** Create reactive type-label dependencies from a decls collection. + + When the [decls] collection changes, type refs automatically update. 
+ + [report_types_dead_only_in_interface] controls whether refs are bidirectional + (false) or only intf->impl (true). *) + +(** {1 Freezing} *) + +val add_to_refs_builder : t -> refs:References.builder -> unit +(** Add all computed type refs to a References.builder. + + Call this after processing files to get the current type refs. + The builder will contain all type-label dependency refs. *) diff --git a/analysis/reanalyze/src/Reanalyze.ml b/analysis/reanalyze/src/Reanalyze.ml index 006454247d..4b6860e86f 100644 --- a/analysis/reanalyze/src/Reanalyze.ml +++ b/analysis/reanalyze/src/Reanalyze.ml @@ -204,22 +204,39 @@ let processFilesParallel ~config ~numDomains (cmtFilePaths : string list) : (** Process all cmt files and return results for DCE and Exception analysis. Conceptually: map process_cmt_file over all files. *) -let processCmtFiles ~config ~cmtRoot : all_files_result = - let cmtFilePaths = collectCmtFilePaths ~cmtRoot in - let numDomains = - match !Cli.parallel with - | n when n > 0 -> n - | n when n < 0 -> - (* Auto-detect: use recommended domain count (number of cores) *) - Domain.recommended_domain_count () - | _ -> 0 +let processCmtFiles ~config ~cmtRoot ~reactive_collection ~skip_file : + all_files_result = + let cmtFilePaths = + let all = collectCmtFilePaths ~cmtRoot in + match skip_file with + | Some should_skip -> List.filter (fun p -> not (should_skip p)) all + | None -> all in - if numDomains > 0 then ( - if !Cli.timing then - Printf.eprintf "Using %d parallel domains for %d files\n%!" numDomains - (List.length cmtFilePaths); - processFilesParallel ~config ~numDomains cmtFilePaths) - else processFilesSequential ~config cmtFilePaths + (* Reactive mode: use incremental processing that skips unchanged files *) + match reactive_collection with + | Some collection -> + let result = + ReactiveAnalysis.process_files ~collection ~config cmtFilePaths + in + { + dce_data_list = result.dce_data_list; + exception_results = result.exception_results; + } + | None -> + let numDomains = + match !Cli.parallel with + | n when n > 0 -> n + | n when n < 0 -> + (* Auto-detect: use recommended domain count (number of cores) *) + Domain.recommended_domain_count () + | _ -> 0 + in + if numDomains > 0 then ( + if !Cli.timing then + Printf.eprintf "Using %d parallel domains for %d files\n%!" 
numDomains + (List.length cmtFilePaths); + processFilesParallel ~config ~numDomains cmtFilePaths) + else processFilesSequential ~config cmtFilePaths (* Shuffle a list using Fisher-Yates algorithm *) let shuffle_list lst = @@ -233,10 +250,17 @@ let shuffle_list lst = done; Array.to_list arr -let runAnalysis ~dce_config ~cmtRoot = +let runAnalysis ~dce_config ~cmtRoot ~reactive_collection ~reactive_merge + ~reactive_liveness ~reactive_solver ~skip_file = (* Map: process each file -> list of file_data *) let {dce_data_list; exception_results} = - processCmtFiles ~config:dce_config ~cmtRoot + processCmtFiles ~config:dce_config ~cmtRoot ~reactive_collection ~skip_file + in + (* Get exception results from reactive collection if available *) + let exception_results = + match reactive_collection with + | Some collection -> ReactiveAnalysis.collect_exception_results collection + | None -> exception_results in (* Optionally shuffle for order-independence testing *) let dce_data_list = @@ -251,82 +275,197 @@ let runAnalysis ~dce_config ~cmtRoot = let analysis_result = if dce_config.DceConfig.run.dce then (* Merging phase: combine all builders -> immutable data *) - let annotations, decls, cross_file, refs, file_deps = + let ann_store, decl_store, cross_file_store, ref_store = Timing.time_phase `Merging (fun () -> - let annotations = - FileAnnotations.merge_all - (dce_data_list - |> List.map (fun fd -> fd.DceFileProcessing.annotations)) - in - let decls = - Declarations.merge_all - (dce_data_list - |> List.map (fun fd -> fd.DceFileProcessing.decls)) + (* Use reactive merge if available, otherwise list-based merge *) + let ann_store, decl_store, cross_file_store = + match reactive_merge with + | Some merged -> + (* Reactive mode: use stores directly, skip freeze! *) + ( AnnotationStore.of_reactive merged.ReactiveMerge.annotations, + DeclarationStore.of_reactive merged.ReactiveMerge.decls, + CrossFileItemsStore.of_reactive + merged.ReactiveMerge.cross_file_items ) + | None -> + (* Non-reactive mode: freeze into data, wrap in store *) + let decls = + Declarations.merge_all + (dce_data_list + |> List.map (fun fd -> fd.DceFileProcessing.decls)) + in + ( AnnotationStore.of_frozen + (FileAnnotations.merge_all + (dce_data_list + |> List.map (fun fd -> fd.DceFileProcessing.annotations) + )), + DeclarationStore.of_frozen decls, + CrossFileItemsStore.of_frozen + (CrossFileItems.merge_all + (dce_data_list + |> List.map (fun fd -> fd.DceFileProcessing.cross_file))) + ) in - let cross_file = - CrossFileItems.merge_all - (dce_data_list - |> List.map (fun fd -> fd.DceFileProcessing.cross_file)) + (* Compute refs. + In reactive mode, use stores directly (skip freeze!). + In non-reactive mode, use the imperative processing. 
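+             A sketch of the two branches below (summary, not new behaviour):
+               reactive:     ReferenceStore.of_reactive, wiring value/type
+                             refs_from, type_deps and exception refs straight
+                             from ReactiveMerge (no copy);
+               non-reactive: fold every file's refs into a builder, add
+                             type-label and cross-file exception refs, then
+                             freeze and wrap with ReferenceStore.of_frozen.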
*) + let ref_store = + match reactive_merge with + | Some merged -> + (* Reactive mode: use stores directly *) + ReferenceStore.of_reactive + ~value_refs_from:merged.value_refs_from + ~type_refs_from:merged.type_refs_from + ~type_deps:merged.type_deps + ~exception_refs:merged.exception_refs + | None -> + (* Non-reactive mode: build refs imperatively *) + (* Need Declarations.t for type deps processing *) + let decls = + match decl_store with + | DeclarationStore.Frozen d -> d + | DeclarationStore.Reactive _ -> + failwith + "unreachable: non-reactive path with reactive store" + in + (* Need CrossFileItems.t for exception refs processing *) + let cross_file = + match cross_file_store with + | CrossFileItemsStore.Frozen cfi -> cfi + | CrossFileItemsStore.Reactive _ -> + failwith + "unreachable: non-reactive path with reactive store" + in + let refs_builder = References.create_builder () in + let file_deps_builder = FileDeps.create_builder () in + (match reactive_collection with + | Some collection -> + ReactiveAnalysis.iter_file_data collection (fun fd -> + References.merge_into_builder + ~from:fd.DceFileProcessing.refs ~into:refs_builder; + FileDeps.merge_into_builder + ~from:fd.DceFileProcessing.file_deps + ~into:file_deps_builder) + | None -> + dce_data_list + |> List.iter (fun fd -> + References.merge_into_builder + ~from:fd.DceFileProcessing.refs ~into:refs_builder; + FileDeps.merge_into_builder + ~from:fd.DceFileProcessing.file_deps + ~into:file_deps_builder)); + (* Compute type-label dependencies after merge *) + DeadType.process_type_label_dependencies ~config:dce_config + ~decls ~refs:refs_builder; + let find_exception = + DeadException.find_exception_from_decls decls + in + (* Process cross-file exception refs *) + CrossFileItems.process_exception_refs cross_file + ~refs:refs_builder ~file_deps:file_deps_builder + ~find_exception ~config:dce_config; + (* Freeze refs for solver *) + let refs = References.freeze_builder refs_builder in + ReferenceStore.of_frozen refs in - (* Merge refs and file_deps into builders for cross-file items processing *) - let refs_builder = References.create_builder () in - let file_deps_builder = FileDeps.create_builder () in - dce_data_list - |> List.iter (fun fd -> - References.merge_into_builder ~from:fd.DceFileProcessing.refs - ~into:refs_builder; - FileDeps.merge_into_builder - ~from:fd.DceFileProcessing.file_deps - ~into:file_deps_builder); - (* Compute type-label dependencies after merge *) - DeadType.process_type_label_dependencies ~config:dce_config ~decls - ~refs:refs_builder; - let find_exception = - DeadException.find_exception_from_decls decls - in - (* Process cross-file exception refs *) - CrossFileItems.process_exception_refs cross_file ~refs:refs_builder - ~file_deps:file_deps_builder ~find_exception ~config:dce_config; - (* Freeze refs and file_deps for solver *) - let refs = References.freeze_builder refs_builder in - let file_deps = FileDeps.freeze_builder file_deps_builder in - (annotations, decls, cross_file, refs, file_deps)) + (ann_store, decl_store, cross_file_store, ref_store)) in (* Solving phase: run the solver and collect issues *) Timing.time_phase `Solving (fun () -> - let empty_optional_args_state = OptionalArgsState.create () in - let analysis_result_core = - DeadCommon.solveDead ~annotations ~decls ~refs ~file_deps - ~optional_args_state:empty_optional_args_state ~config:dce_config - ~checkOptionalArg:(fun - ~optional_args_state:_ ~annotations:_ ~config:_ _ -> []) - in - (* Compute liveness-aware optional args state *) - 
let is_live pos = - match Declarations.find_opt decls pos with - | Some decl -> Decl.isLive decl - | None -> true - in - let optional_args_state = - CrossFileItems.compute_optional_args_state cross_file ~decls - ~is_live - in - (* Collect optional args issues only for live declarations *) - let optional_args_issues = - Declarations.fold - (fun _pos decl acc -> - if Decl.isLive decl then - let issues = - DeadOptionalArgs.check ~optional_args_state ~annotations - ~config:dce_config decl - in - List.rev_append issues acc - else acc) - decls [] - |> List.rev - in - Some - (AnalysisResult.add_issues analysis_result_core optional_args_issues)) + match reactive_solver with + | Some solver -> + (* Reactive solver: iterate dead_decls + live_decls *) + let t0 = Unix.gettimeofday () in + let dead_code_issues = + ReactiveSolver.collect_issues ~t:solver ~config:dce_config + ~ann_store + in + let t1 = Unix.gettimeofday () in + (* Collect optional args issues from live declarations *) + let optional_args_issues = + match reactive_merge with + | Some merged -> + (* Create CrossFileItemsStore from reactive collection *) + let cross_file_store = + CrossFileItemsStore.of_reactive + merged.ReactiveMerge.cross_file_items + in + (* Compute optional args state using reactive liveness check. + Uses ReactiveSolver.is_pos_live which checks the reactive live collection + instead of mutable resolvedDead field. *) + let is_live pos = ReactiveSolver.is_pos_live ~t:solver pos in + let find_decl pos = + Reactive.get merged.ReactiveMerge.decls pos + in + let optional_args_state = + CrossFileItemsStore.compute_optional_args_state + cross_file_store ~find_decl ~is_live + in + (* Iterate live declarations and check for optional args issues *) + let issues = ref [] in + ReactiveSolver.iter_live_decls ~t:solver (fun decl -> + let decl_issues = + DeadOptionalArgs.check ~optional_args_state ~ann_store + ~config:dce_config decl + in + issues := List.rev_append decl_issues !issues); + List.rev !issues + | None -> [] + in + let t2 = Unix.gettimeofday () in + let all_issues = dead_code_issues @ optional_args_issues in + let num_dead, num_live = ReactiveSolver.stats ~t:solver in + if !Cli.timing then ( + Printf.eprintf + " ReactiveSolver: dead_code=%.3fms opt_args=%.3fms (dead=%d, \ + live=%d, issues=%d)\n" + ((t1 -. t0) *. 1000.0) + ((t2 -. t1) *. 
1000.0) + num_dead num_live (List.length all_issues); + (match reactive_liveness with + | Some liveness -> ReactiveLiveness.print_stats ~t:liveness + | None -> ()); + ReactiveSolver.print_stats ~t:solver); + if !Cli.mermaid then + Printf.eprintf "\n%s\n" (Reactive.to_mermaid ()); + Some (AnalysisResult.add_issues AnalysisResult.empty all_issues) + | None -> + (* Non-reactive path: use old solver with optional args *) + let empty_optional_args_state = OptionalArgsState.create () in + let analysis_result_core = + DeadCommon.solveDead ~ann_store ~decl_store ~ref_store + ~optional_args_state:empty_optional_args_state + ~config:dce_config + ~checkOptionalArg:(fun + ~optional_args_state:_ ~ann_store:_ ~config:_ _ -> []) + in + (* Compute liveness-aware optional args state *) + let is_live pos = + match DeclarationStore.find_opt decl_store pos with + | Some decl -> Decl.isLive decl + | None -> true + in + let optional_args_state = + CrossFileItemsStore.compute_optional_args_state cross_file_store + ~find_decl:(DeclarationStore.find_opt decl_store) + ~is_live + in + (* Collect optional args issues only for live declarations *) + let optional_args_issues = + DeclarationStore.fold + (fun _pos decl acc -> + if Decl.isLive decl then + let issues = + DeadOptionalArgs.check ~optional_args_state ~ann_store + ~config:dce_config decl + in + List.rev_append issues acc + else acc) + decl_store [] + |> List.rev + in + Some + (AnalysisResult.add_issues analysis_result_core + optional_args_issues)) else None in (* Reporting phase *) @@ -345,14 +484,186 @@ let runAnalysis ~dce_config ~cmtRoot = let runAnalysisAndReport ~cmtRoot = Log_.Color.setup (); Timing.enabled := !Cli.timing; - Timing.reset (); if !Cli.json then EmitJson.start (); let dce_config = DceConfig.current () in - runAnalysis ~dce_config ~cmtRoot; - Log_.Stats.report ~config:dce_config; - Log_.Stats.clear (); - if !Cli.json then EmitJson.finish (); - Timing.report () + let numRuns = max 1 !Cli.runs in + (* Create reactive collection once, reuse across runs *) + let reactive_collection = + if !Cli.reactive then Some (ReactiveAnalysis.create ~config:dce_config) + else None + in + (* Create reactive merge once if reactive mode is enabled. + This automatically updates when reactive_collection changes. *) + let reactive_merge = + match reactive_collection with + | Some collection -> + let file_data_collection = + ReactiveAnalysis.to_file_data_collection collection + in + Some (ReactiveMerge.create file_data_collection) + | None -> None + in + (* Create reactive liveness. This is created before files are processed, + so it receives deltas as files are processed incrementally. *) + let reactive_liveness = + match reactive_merge with + | Some merged -> Some (ReactiveLiveness.create ~merged) + | None -> None + in + (* Create reactive solver once - sets up the reactive pipeline: + decls + live → dead_decls → issues + All downstream collections update automatically when inputs change. 
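+     Rough dataflow (illustrative):
+       cmt files -> ReactiveAnalysis -> file_data
+                 -> ReactiveMerge    -> decls / annotations / refs
+                 -> ReactiveLiveness -> live
+                 -> ReactiveSolver   -> dead_decls -> issues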
*) + let reactive_solver = + match (reactive_merge, reactive_liveness) with + | Some merged, Some liveness_result -> + (* Pass value_refs_from for hasRefBelow (needed when transitive=false) *) + let value_refs_from = + if dce_config.DceConfig.run.transitive then None + else Some merged.ReactiveMerge.value_refs_from + in + Some + (ReactiveSolver.create ~decls:merged.ReactiveMerge.decls + ~live:liveness_result.ReactiveLiveness.live + ~annotations:merged.ReactiveMerge.annotations ~value_refs_from + ~config:dce_config) + | _ -> None + in + (* Collect CMT file paths once for churning *) + let cmtFilePaths = + if !Cli.churn > 0 then Some (collectCmtFilePaths ~cmtRoot) else None + in + (* Track previous issue count for diff reporting *) + let prev_issue_count = ref 0 in + (* Track currently removed files (to add them back on next run) *) + let removed_files = ref [] in + (* Set of removed files for filtering in processCmtFiles *) + let removed_set = Hashtbl.create 64 in + (* Aggregate stats for churn mode *) + let churn_times = ref [] in + let issues_added_list = ref [] in + let issues_removed_list = ref [] in + for run = 1 to numRuns do + Timing.reset (); + (* Clear stats at start of each run to avoid accumulation *) + if run > 1 then Log_.Stats.clear (); + (* Print run header first *) + if numRuns > 1 && !Cli.timing then + Printf.eprintf "\n=== Run %d/%d ===\n%!" run numRuns; + (* Churn: alternate between remove and add phases *) + (if !Cli.churn > 0 then + match (reactive_collection, cmtFilePaths) with + | Some collection, Some paths -> + Reactive.reset_stats (); + if run > 1 && !removed_files <> [] then ( + (* Add back previously removed files *) + let to_add = !removed_files in + removed_files := []; + (* Clear removed set so these files get processed again *) + List.iter (fun p -> Hashtbl.remove removed_set p) to_add; + let t0 = Unix.gettimeofday () in + let processed = + ReactiveFileCollection.process_files_batch + (collection + : ReactiveAnalysis.t + :> (_, _) ReactiveFileCollection.t) + to_add + in + let elapsed = Unix.gettimeofday () -. t0 in + Timing.add_churn_time elapsed; + churn_times := elapsed :: !churn_times; + if !Cli.timing then ( + Printf.eprintf " Added back %d files (%.3fs)\n%!" processed + elapsed; + (match reactive_liveness with + | Some liveness -> ReactiveLiveness.print_stats ~t:liveness + | None -> ()); + match reactive_solver with + | Some solver -> ReactiveSolver.print_stats ~t:solver + | None -> ())) + else if run > 1 then ( + (* Remove new random files *) + let numChurn = min !Cli.churn (List.length paths) in + let shuffled = shuffle_list paths in + let to_remove = List.filteri (fun i _ -> i < numChurn) shuffled in + removed_files := to_remove; + (* Mark as removed so processCmtFiles skips them *) + List.iter (fun p -> Hashtbl.replace removed_set p ()) to_remove; + let t0 = Unix.gettimeofday () in + let removed = + ReactiveFileCollection.remove_batch + (collection + : ReactiveAnalysis.t + :> (_, _) ReactiveFileCollection.t) + to_remove + in + let elapsed = Unix.gettimeofday () -. t0 in + Timing.add_churn_time elapsed; + churn_times := elapsed :: !churn_times; + if !Cli.timing then ( + Printf.eprintf " Removed %d files (%.3fs)\n%!" 
removed elapsed; + (match reactive_liveness with + | Some liveness -> ReactiveLiveness.print_stats ~t:liveness + | None -> ()); + match reactive_solver with + | Some solver -> ReactiveSolver.print_stats ~t:solver + | None -> ())) + | _ -> ()); + (* Skip removed files in reactive mode *) + let skip_file = + if Hashtbl.length removed_set > 0 then + Some (fun path -> Hashtbl.mem removed_set path) + else None + in + runAnalysis ~dce_config ~cmtRoot ~reactive_collection ~reactive_merge + ~reactive_liveness ~reactive_solver ~skip_file; + (* Report issue count with diff *) + let current_count = Log_.Stats.get_issue_count () in + if !Cli.churn > 0 then ( + let diff = current_count - !prev_issue_count in + (* Track added/removed separately *) + if run > 1 then + if diff > 0 then + issues_added_list := float_of_int diff :: !issues_added_list + else if diff < 0 then + issues_removed_list := float_of_int (-diff) :: !issues_removed_list; + let diff_str = + if run = 1 then "" + else if diff >= 0 then Printf.sprintf " (+%d)" diff + else Printf.sprintf " (%d)" diff + in + Log_.Stats.report ~config:dce_config; + if !Cli.timing then + Printf.eprintf " Total issues: %d%s\n%!" current_count diff_str; + prev_issue_count := current_count) + else if run = numRuns then + (* Only report on last run for non-churn mode *) + Log_.Stats.report ~config:dce_config; + Log_.Stats.clear (); + Timing.report () + done; + (* Print aggregate churn stats *) + if !Cli.churn > 0 && !Cli.timing && List.length !churn_times > 0 then ( + let calc_stats lst = + if lst = [] then (0.0, 0.0) + else + let n = float_of_int (List.length lst) in + let sum = List.fold_left ( +. ) 0.0 lst in + let mean = sum /. n in + let variance = + List.fold_left (fun acc x -> acc +. ((x -. mean) ** 2.0)) 0.0 lst /. n + in + (mean, sqrt variance) + in + let time_mean, time_std = calc_stats !churn_times in + let added_mean, added_std = calc_stats !issues_added_list in + let removed_mean, removed_std = calc_stats !issues_removed_list in + Printf.eprintf "\n=== Churn Summary ===\n"; + Printf.eprintf " Churn operations: %d\n" (List.length !churn_times); + Printf.eprintf " Churn time: mean=%.3fs std=%.3fs\n" time_mean time_std; + Printf.eprintf " Issues added: mean=%.0f std=%.0f\n" added_mean added_std; + Printf.eprintf " Issues removed: mean=%.0f std=%.0f\n" removed_mean + removed_std); + if !Cli.json then EmitJson.finish () let cli () = let analysisKindSet = ref false in @@ -463,6 +774,20 @@ let cli () = "n Process files in parallel using n domains (0 = sequential, default; \ -1 = auto-detect cores)" ); ("-timing", Set Cli.timing, "Report internal timing of analysis phases"); + ( "-mermaid", + Set Cli.mermaid, + "Output Mermaid diagram of reactive pipeline" ); + ( "-reactive", + Set Cli.reactive, + "Use reactive analysis (caches processed file_data, skips unchanged \ + files)" ); + ( "-runs", + Int (fun n -> Cli.runs := n), + "n Run analysis n times (for benchmarking cache effectiveness)" ); + ( "-churn", + Int (fun n -> Cli.churn := n), + "n Remove and re-add n random files between runs (tests incremental \ + correctness)" ); ("-version", Unit versionAndExit, "Show version information and exit"); ("--version", Unit versionAndExit, "Show version information and exit"); ] diff --git a/analysis/reanalyze/src/ReferenceStore.ml b/analysis/reanalyze/src/ReferenceStore.ml new file mode 100644 index 0000000000..86b6d4afff --- /dev/null +++ b/analysis/reanalyze/src/ReferenceStore.ml @@ -0,0 +1,37 @@ +(** Abstraction over reference storage. 
+ + Allows the solver to work with either: + - [Frozen]: Traditional [References.t] (copied from reactive) + - [Reactive]: Direct reactive collections (no copy, zero-cost on warm runs) + + This eliminates the O(N) freeze step when using reactive mode. *) + +type t = + | Frozen of References.t + | Reactive of { + (* Per-file refs_from *) + value_refs_from: (Lexing.position, PosSet.t) Reactive.t; + type_refs_from: (Lexing.position, PosSet.t) Reactive.t; + (* Type deps refs_from *) + all_type_refs_from: (Lexing.position, PosSet.t) Reactive.t; + (* Exception refs_from *) + exception_value_refs_from: (Lexing.position, PosSet.t) Reactive.t; + } + +let of_frozen refs = Frozen refs + +let of_reactive ~value_refs_from ~type_refs_from ~type_deps ~exception_refs = + Reactive + { + value_refs_from; + type_refs_from; + all_type_refs_from = type_deps.ReactiveTypeDeps.all_type_refs_from; + exception_value_refs_from = + exception_refs.ReactiveExceptionRefs.resolved_refs_from; + } + +(** Get underlying References.t for Frozen stores. Used for forward liveness. *) +let get_refs_opt t = + match t with + | Frozen refs -> Some refs + | Reactive _ -> None diff --git a/analysis/reanalyze/src/ReferenceStore.mli b/analysis/reanalyze/src/ReferenceStore.mli new file mode 100644 index 0000000000..cfc266fad3 --- /dev/null +++ b/analysis/reanalyze/src/ReferenceStore.mli @@ -0,0 +1,24 @@ +(** Abstraction over reference storage. + + Allows the solver to work with either: + - [Frozen]: Traditional [References.t] (copied from reactive) + - [Reactive]: Direct reactive collections (no copy, zero-cost on warm runs) + + This eliminates the O(N) freeze step when using reactive mode. *) + +type t +(** Abstract reference store *) + +val of_frozen : References.t -> t +(** Wrap a frozen [References.t] *) + +val of_reactive : + value_refs_from:(Lexing.position, PosSet.t) Reactive.t -> + type_refs_from:(Lexing.position, PosSet.t) Reactive.t -> + type_deps:ReactiveTypeDeps.t -> + exception_refs:ReactiveExceptionRefs.t -> + t +(** Wrap reactive collections directly (no copy) *) + +val get_refs_opt : t -> References.t option +(** Get underlying References.t for Frozen stores. Returns None for Reactive. *) diff --git a/analysis/reanalyze/src/References.ml b/analysis/reanalyze/src/References.ml index 632dbd7861..fd324ed434 100644 --- a/analysis/reanalyze/src/References.ml +++ b/analysis/reanalyze/src/References.ml @@ -2,56 +2,83 @@ Two types are provided: - [builder] - mutable, for AST processing - - [t] - immutable, for solver (read-only access) *) + - [t] - immutable, for solver (read-only access) + + References are stored in refs_from direction only: + - refs_from: posFrom -> {posTo1, posTo2, ...} = what posFrom references + + This is what the forward liveness algorithm needs. 
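+   Example (illustrative positions): if the expression at posF references the
+   declaration at posG, [add_value_ref ~posTo:posG ~posFrom:posF] stores
+   refs_from: posF -> {posG}, so forward liveness can follow posF's entry and
+   mark posG live once posF is reachable.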
*) (* Helper to add to a set in a hashtable *) let addSet h k v = let set = try PosHash.find h k with Not_found -> PosSet.empty in PosHash.replace h k (PosSet.add v set) -(* Helper to find a set in a hashtable *) -let findSet h k = try PosHash.find h k with Not_found -> PosSet.empty - -(* Internal representation: two hashtables *) +(* Internal representation: two hashtables (refs_from for value and type) *) type refs_table = PosSet.t PosHash.t -type builder = {value_refs: refs_table; type_refs: refs_table} +type builder = {value_refs_from: refs_table; type_refs_from: refs_table} -type t = {value_refs: refs_table; type_refs: refs_table} +type t = {value_refs_from: refs_table; type_refs_from: refs_table} (* ===== Builder API ===== *) let create_builder () : builder = - {value_refs = PosHash.create 256; type_refs = PosHash.create 256} + {value_refs_from = PosHash.create 256; type_refs_from = PosHash.create 256} let add_value_ref (builder : builder) ~posTo ~posFrom = - addSet builder.value_refs posTo posFrom + addSet builder.value_refs_from posFrom posTo let add_type_ref (builder : builder) ~posTo ~posFrom = - addSet builder.type_refs posTo posFrom + addSet builder.type_refs_from posFrom posTo let merge_into_builder ~(from : builder) ~(into : builder) = PosHash.iter (fun pos refs -> - refs |> PosSet.iter (fun fromPos -> addSet into.value_refs pos fromPos)) - from.value_refs; + refs |> PosSet.iter (fun toPos -> addSet into.value_refs_from pos toPos)) + from.value_refs_from; PosHash.iter (fun pos refs -> - refs |> PosSet.iter (fun fromPos -> addSet into.type_refs pos fromPos)) - from.type_refs + refs |> PosSet.iter (fun toPos -> addSet into.type_refs_from pos toPos)) + from.type_refs_from let merge_all (builders : builder list) : t = let result = create_builder () in builders |> List.iter (fun builder -> merge_into_builder ~from:builder ~into:result); - {value_refs = result.value_refs; type_refs = result.type_refs} + { + value_refs_from = result.value_refs_from; + type_refs_from = result.type_refs_from; + } let freeze_builder (builder : builder) : t = (* Zero-copy freeze - builder should not be used after this *) - {value_refs = builder.value_refs; type_refs = builder.type_refs} + { + value_refs_from = builder.value_refs_from; + type_refs_from = builder.type_refs_from; + } + +(* ===== Builder extraction for reactive merge ===== *) + +let builder_value_refs_from_list (builder : builder) : + (Lexing.position * PosSet.t) list = + PosHash.fold + (fun pos refs acc -> (pos, refs) :: acc) + builder.value_refs_from [] + +let builder_type_refs_from_list (builder : builder) : + (Lexing.position * PosSet.t) list = + PosHash.fold + (fun pos refs acc -> (pos, refs) :: acc) + builder.type_refs_from [] + +let create ~value_refs_from ~type_refs_from : t = + {value_refs_from; type_refs_from} (* ===== Read-only API ===== *) -let find_value_refs (t : t) pos = findSet t.value_refs pos +let iter_value_refs_from (t : t) f = PosHash.iter f t.value_refs_from +let iter_type_refs_from (t : t) f = PosHash.iter f t.type_refs_from -let find_type_refs (t : t) pos = findSet t.type_refs pos +let value_refs_from_length (t : t) = PosHash.length t.value_refs_from +let type_refs_from_length (t : t) = PosHash.length t.type_refs_from diff --git a/analysis/reanalyze/src/References.mli b/analysis/reanalyze/src/References.mli index 05228b7b8e..84939aa2f1 100644 --- a/analysis/reanalyze/src/References.mli +++ b/analysis/reanalyze/src/References.mli @@ -4,8 +4,10 @@ - [builder] - mutable, for AST processing - [t] - immutable, for solver 
(read-only access) - References track which positions reference which declarations. - Both value references and type references are tracked. *) + References are stored in refs_from direction: + - refs_from: posFrom -> {targets it references} + + This is what the forward liveness algorithm needs. *) (** {2 Types} *) @@ -18,10 +20,14 @@ type builder (** {2 Builder API - for AST processing} *) val create_builder : unit -> builder + val add_value_ref : builder -> posTo:Lexing.position -> posFrom:Lexing.position -> unit +(** Add a value reference. *) + val add_type_ref : builder -> posTo:Lexing.position -> posFrom:Lexing.position -> unit +(** Add a type reference. *) val merge_into_builder : from:builder -> into:builder -> unit (** Merge one builder into another. *) @@ -32,7 +38,27 @@ val merge_all : builder list -> t val freeze_builder : builder -> t (** Convert builder to immutable t. Builder should not be used after this. *) -(** {2 Read-only API for t - for solver} *) +(** {2 Builder extraction for reactive merge} *) + +val builder_value_refs_from_list : builder -> (Lexing.position * PosSet.t) list +(** Extract value refs (posFrom -> targets) *) + +val builder_type_refs_from_list : builder -> (Lexing.position * PosSet.t) list +(** Extract type refs (posFrom -> targets) *) + +val create : + value_refs_from:PosSet.t PosHash.t -> type_refs_from:PosSet.t PosHash.t -> t +(** Create a References.t from hashtables *) + +(** {2 Read-only API - for liveness} *) + +val iter_value_refs_from : t -> (Lexing.position -> PosSet.t -> unit) -> unit +(** Iterate all value refs *) + +val iter_type_refs_from : t -> (Lexing.position -> PosSet.t -> unit) -> unit +(** Iterate all type refs *) + +(** {2 Length} *) -val find_value_refs : t -> Lexing.position -> PosSet.t -val find_type_refs : t -> Lexing.position -> PosSet.t +val value_refs_from_length : t -> int +val type_refs_from_length : t -> int diff --git a/analysis/reanalyze/src/Timing.ml b/analysis/reanalyze/src/Timing.ml index b9f739df6a..782b0d5399 100644 --- a/analysis/reanalyze/src/Timing.ml +++ b/analysis/reanalyze/src/Timing.ml @@ -3,6 +3,8 @@ let enabled = ref false type phase_times = { + (* Churn (file add/remove) *) + mutable churn: float; (* CMT processing sub-phases *) mutable file_loading: float; mutable result_collection: float; @@ -15,6 +17,7 @@ type phase_times = { let times = { + churn = 0.0; file_loading = 0.0; result_collection = 0.0; merging = 0.0; @@ -26,12 +29,15 @@ let times = let timing_mutex = Mutex.create () let reset () = + times.churn <- 0.0; times.file_loading <- 0.0; times.result_collection <- 0.0; times.merging <- 0.0; times.solving <- 0.0; times.reporting <- 0.0 +let add_churn_time t = times.churn <- times.churn +. t + let now () = Unix.gettimeofday () let time_phase phase_name f = @@ -54,24 +60,20 @@ let time_phase phase_name f = let report () = if !enabled then ( - (* NOTE about semantics: - - [file_loading] is treated as the WALL-CLOCK time for the overall - "CMT processing" phase (including per-file processing and any - synchronization). - - [result_collection] is an AGGREGATE metric across domains: time spent - in (and waiting on) the mutex-protected result merge/collection - section, summed across all worker domains. This may exceed wall-clock - time in parallel runs. - We do NOT add them together, otherwise we'd double-count. *) let cmt_total = times.file_loading in let analysis_total = times.merging +. times.solving in - let total = cmt_total +. analysis_total +. times.reporting in + let total = times.churn +. 
cmt_total +. analysis_total +. times.reporting in Printf.eprintf "\n=== Timing ===\n"; + if times.churn > 0.0 then + Printf.eprintf " Churn: %.3fs (%.1f%%)\n" times.churn + (100.0 *. times.churn /. total); Printf.eprintf " CMT processing: %.3fs (%.1f%%)\n" cmt_total (100.0 *. cmt_total /. total); - Printf.eprintf " - Wall clock: %.3fs\n" times.file_loading; - Printf.eprintf " - Result collection: %.3fms (aggregate)\n" - (1000.0 *. times.result_collection); + (* Only show parallel-specific timing when used *) + if times.result_collection > 0.0 then + Printf.eprintf + " - Parallel merge: %.3fms (aggregate across domains)\n" + (1000.0 *. times.result_collection); Printf.eprintf " Analysis: %.3fs (%.1f%%)\n" analysis_total (100.0 *. analysis_total /. total); Printf.eprintf " - Merging: %.3fms\n" (1000.0 *. times.merging); diff --git a/analysis/reanalyze/src/dune b/analysis/reanalyze/src/dune index e8b736446f..8431b0d52d 100644 --- a/analysis/reanalyze/src/dune +++ b/analysis/reanalyze/src/dune @@ -2,4 +2,4 @@ (name reanalyze) (flags (-w "+6+26+27+32+33+39")) - (libraries jsonlib ext ml str unix)) + (libraries reactive jsonlib ext ml str unix)) diff --git a/analysis/src/DceCommand.ml b/analysis/src/DceCommand.ml index 1578a66bb4..45d3e610a2 100644 --- a/analysis/src/DceCommand.ml +++ b/analysis/src/DceCommand.ml @@ -1,6 +1,8 @@ let command () = Reanalyze.RunConfig.dce (); let dce_config = Reanalyze.DceConfig.current () in - Reanalyze.runAnalysis ~dce_config ~cmtRoot:None; + Reanalyze.runAnalysis ~dce_config ~cmtRoot:None ~reactive_collection:None + ~reactive_merge:None ~reactive_liveness:None ~reactive_solver:None + ~skip_file:None; let issues = !Reanalyze.Log_.Stats.issues in Printf.printf "issues:%d\n" (List.length issues) diff --git a/docs/reactive_reanalyze_design.md b/docs/reactive_reanalyze_design.md new file mode 100644 index 0000000000..c19b0ac1c0 --- /dev/null +++ b/docs/reactive_reanalyze_design.md @@ -0,0 +1,469 @@ +# Reactive Reanalyze: Using skip-lite for Incremental Analysis + +## Executive Summary + +This document investigates how skip-lite's reactive collections can be used to create an analysis service that stays on and reacts to file changes, dramatically speeding up CMT processing for repeated analysis runs. + +**Key Insight**: The benchmark results from skip-lite show a **950x speedup** when processing only changed files vs. re-reading all files. Applied to reanalyze with ~4900 files (50 copies benchmark), this could reduce CMT processing from ~780ms to ~1-2ms for typical incremental changes. + +## Current Architecture + +### Reanalyze Processing Flow + +``` + ┌─────────────────┐ + │ Collect CMT │ + │ File Paths │ + └────────┬────────┘ + │ + ┌────────▼────────┐ + │ Load CMT Files │ ← 77% of time (~780ms) + │ (Cmt_format. 
│ + │ read_cmt) │ + └────────┬────────┘ + │ + ┌────────▼────────┐ + │ Process Each │ + │ File → file_data│ + └────────┬────────┘ + │ + ┌─────────────────┴─────────────────┐ + │ │ + ┌────────▼────────┐ ┌────────▼────────┐ + │ Merge Builders │ │ Exception │ + │ (annotations, │ │ Results │ + │ decls, refs, │ └─────────────────┘ + │ cross_file, │ + │ file_deps) │ ← 8% of time (~80ms) + └────────┬────────┘ + │ + ┌────────▼────────┐ + │ Solve (DCE, │ ← 15% of time (~150ms) + │ optional args) │ + └────────┬────────┘ + │ + ┌────────▼────────┐ + │ Report Issues │ ← <1% of time + └─────────────────┘ +``` + +### Current Bottleneck + +From the benchmark (50 copies, ~4900 files, 12 cores): + +| Phase | Sequential | Parallel | % of Total | +|-------|-----------|----------|------------| +| File loading | 779ms | 422ms | 77% / 64% | +| Merging | 81ms | 94ms | 8% / 14% | +| Solving | 146ms | 148ms | 15% / 22% | +| Total | 1007ms | 664ms | 100% | + +**CMT file loading is the dominant cost** because each file requires: +1. System call to open file +2. Reading marshalled data from disk +3. Unmarshalling into OCaml heap +4. AST traversal to extract analysis data + +## Proposed Architecture: Reactive Analysis Service + +### Design Goals + +1. **Persistent service** - Stay running and maintain state between analysis runs +2. **File watching** - React to file changes (create/modify/delete) +3. **Incremental updates** - Only process changed files +4. **Cached results** - Keep processed `file_data` in memory +5. **Fast iteration** - Sub-10ms response for typical edits + +### Integration with skip-lite + +skip-lite provides two key primitives: + +#### 1. `Marshal_cache` - Efficient CMT Loading + +```ocaml +(* Instead of Cmt_format.read_cmt which does file I/O every time *) +let load_cmt path = + Marshal_cache.with_unmarshalled_file path (fun cmt_infos -> + DceFileProcessing.process_cmt_file ~config ~file ~cmtFilePath cmt_infos + ) +``` + +**Benefits**: +- Memory-mapped, off-heap storage (not GC-scanned) +- LRU eviction for memory management +- Automatic invalidation on file change + +#### 2. 
`Reactive_file_collection` - Delta-Based Processing + +```ocaml +(* Create collection that maps CMT paths to processed file_data *) +let cmt_collection = Reactive_file_collection.create + ~process:(fun (cmt_infos : Cmt_format.cmt_infos) -> + (* This is called only when file changes *) + process_cmt_for_dce ~config cmt_infos + ) + +(* Initial load - process all files once *) +List.iter (Reactive_file_collection.add cmt_collection) all_cmt_paths + +(* On file watcher event - only process changed files *) +Reactive_file_collection.apply cmt_collection [ + Modified "lib/bs/src/MyModule.cmt"; + Modified "lib/bs/src/MyModule.cmti"; +] + +(* Get all processed data for analysis *) +let file_data_list = Reactive_file_collection.values cmt_collection +``` + +### Service Architecture + +``` +┌────────────────────────────────────────────────────────────────┐ +│ Reanalyze Service │ +├────────────────────────────────────────────────────────────────┤ +│ │ +│ ┌──────────────┐ ┌─────────────────────────────────┐ │ +│ │ File Watcher │─────▶│ Reactive_file_collection │ │ +│ │ (fswatch/ │ │ ┌───────────────────────────┐ │ │ +│ │ inotify) │ │ │ path → file_data cache │ │ │ +│ └──────────────┘ │ │ (backed by Marshal_cache) │ │ │ +│ │ └───────────────────────────┘ │ │ +│ └──────────┬──────────────────────┘ │ +│ │ │ +│ │ file_data_list │ +│ ▼ │ +│ ┌─────────────────────────────────┐ │ +│ │ Incremental Merge & Solve │ │ +│ │ (may be reactive in future) │ │ +│ └──────────┬──────────────────────┘ │ +│ │ │ +│ ▼ │ +│ ┌─────────────────────────────────┐ │ +│ │ Issues / Reports │ │ +│ └─────────────────────────────────┘ │ +│ │ +└────────────────────────────────────────────────────────────────┘ +``` + +### API Design + +```ocaml +module ReactiveReanalyze : sig + type t + (** A reactive analysis service *) + + val create : config:DceConfig.t -> project_root:string -> t + (** Create a new reactive analysis service *) + + val start : t -> unit + (** Start file watching and initial analysis *) + + val stop : t -> unit + (** Stop file watching *) + + val analyze : t -> AnalysisResult.t + (** Run analysis on current state. Fast if no files changed. *) + + val on_file_change : t -> string -> unit + (** Notify of a file change (for external file watchers) *) + + val apply_events : t -> Reactive_file_collection.event list -> unit + (** Apply batch of file events *) +end +``` + +## Performance Analysis + +### Expected Speedup + +| Scenario | Current | With skip-lite | Speedup | +|----------|---------|----------------|---------| +| Cold start (all files) | 780ms | 780ms | 1x | +| Warm cache, no changes | 780ms | ~20ms | **39x** | +| Single file changed | 780ms | ~2ms | **390x** | +| 10 files changed | 780ms | ~15ms | **52x** | + +### How skip-lite Achieves This + +1. **Marshal_cache.with_unmarshalled_if_changed**: + - Stats all files to check modification time (~20ms for 5000 files) + - Only unmarshals files that changed + - Returns `None` for unchanged files, `Some result` for changed + +2. 
**Reactive_file_collection**: + - Maintains hash table of processed values + - On `apply`, only processes files in the event list + - Iteration is O(n) but values are already computed + +### Memory Considerations + +| Data | Storage | GC Impact | +|------|---------|-----------| +| CMT file bytes | mmap (off-heap) | None | +| Unmarshalled cmt_infos | OCaml heap (temporary) | During callback only | +| Processed file_data | OCaml heap (cached) | Scanned by GC | + +For 5000 files with average 20KB each: +- mmap cache: ~100MB (off-heap, OS-managed) +- file_data cache: ~50MB (on-heap, estimate) + +## Implementation Plan + +### Phase 1: Integration Setup + +1. **Add skip-lite dependency** to dune/opam +2. **Create wrapper module** `CmtCache` that provides: + ```ocaml + val read_cmt : string -> Cmt_format.cmt_infos + (** Drop-in replacement for Cmt_format.read_cmt using Marshal_cache *) + ``` + +### Phase 2: Reactive Collection + +1. **Define file_data type** as the cached result type +2. **Create reactive collection** for CMT → file_data mapping +3. **Implement delta processing** that only reprocesses changed files + +### Phase 3: Analysis Service + +1. **File watching integration** (can use fswatch, inotify, or external watcher) +2. **Service loop** that waits for events and re-runs analysis +3. **LSP integration** (optional) for editor support + +### Phase 4: Incremental Merge & Solve (Future) + +The current merge and solve phases are relatively fast (22% of time), but could be made incremental in the future: + +- Track which declarations changed +- Incrementally update reference graph +- Re-solve only affected transitive closure + +## Prototype Implementation + +Here's a minimal prototype showing how to integrate `Reactive_file_collection`: + +```ocaml +(* reactive_analysis.ml *) + +module CmtCollection = struct + type file_data = DceFileProcessing.file_data + + let collection : file_data Reactive_file_collection.t option ref = ref None + + let init ~config ~cmt_paths = + let coll = Reactive_file_collection.create + ~process:(fun (cmt_infos : Cmt_format.cmt_infos) -> + (* Extract file context from cmt_infos *) + let source_path = + match cmt_infos.cmt_annots |> FindSourceFile.cmt with + | Some path -> path + | None -> failwith "No source file" + in + let module_name = Paths.getModuleName source_path in + let is_interface = match cmt_infos.cmt_annots with + | Cmt_format.Interface _ -> true + | _ -> false + in + let file : DceFileProcessing.file_context = { + source_path; module_name; is_interface + } in + let cmtFilePath = "" (* not used in process_cmt_file body *) in + DceFileProcessing.process_cmt_file ~config ~file ~cmtFilePath cmt_infos + ) + in + (* Initial load *) + List.iter (Reactive_file_collection.add coll) cmt_paths; + collection := Some coll; + coll + + let apply_events events = + match !collection with + | Some coll -> Reactive_file_collection.apply coll events + | None -> failwith "Collection not initialized" + + let get_all_file_data () = + match !collection with + | Some coll -> Reactive_file_collection.values coll + | None -> [] +end + +(* Modified Reanalyze.runAnalysis *) +let runAnalysisIncremental ~config ~events = + (* Apply only the changed files *) + CmtCollection.apply_events events; + + (* Get all file_data (instant - values already computed) *) + let file_data_list = CmtCollection.get_all_file_data () in + + (* Rest of analysis is same as before *) + let annotations, decls, cross_file, refs, file_deps = + merge_all_builders file_data_list + in + solve ~annotations 
~decls ~refs ~file_deps ~config +``` + +## Testing Strategy + +1. **Correctness**: Verify reactive analysis produces same results as batch +2. **Performance**: Benchmark incremental updates vs full analysis +3. **Edge cases**: + - File deletion during analysis + - Rapid successive changes + - Build errors (incomplete CMT files) + +## Open Questions + +1. **Build system integration**: How to get file events from rewatch/ninja? +2. **CMT staleness**: What if build system is still writing CMT files? +3. **Multi-project**: How to handle monorepos with multiple rescript.json? +4. **Memory limits**: When to evict file_data from cache? + +## Integration Points + +### 1. Shared.tryReadCmt → Marshal_cache + +Current code in `analysis/src/Shared.ml`: +```ocaml +let tryReadCmt cmt = + if not (Files.exists cmt) then ( + Log.log ("Cmt file does not exist " ^ cmt); + None) + else + match Cmt_format.read_cmt cmt with + | exception ... -> None + | x -> Some x +``` + +With Marshal_cache: +```ocaml +let tryReadCmt cmt = + if not (Files.exists cmt) then ( + Log.log ("Cmt file does not exist " ^ cmt); + None) + else + try + Some (Marshal_cache.with_unmarshalled_file cmt Fun.id) + with Marshal_cache.Cache_error (_, msg) -> + Log.log ("Invalid cmt format " ^ cmt ^ ": " ^ msg); + None +``` + +### 2. Reanalyze.loadCmtFile → Reactive_file_collection + +Current code in `analysis/reanalyze/src/Reanalyze.ml`: +```ocaml +let loadCmtFile ~config cmtFilePath : cmt_file_result option = + let cmt_infos = Cmt_format.read_cmt cmtFilePath in + ... +``` + +With reactive collection: +```ocaml +(* Global reactive collection *) +let cmt_collection : cmt_file_result Reactive_file_collection.t option ref = ref None + +let init_collection ~config = + cmt_collection := Some (Reactive_file_collection.create + ~process:(fun (cmt_infos : Cmt_format.cmt_infos) -> + process_cmt_infos ~config cmt_infos + )) + +let loadCmtFile_reactive ~config cmtFilePath = + match !cmt_collection with + | Some coll -> Reactive_file_collection.get coll cmtFilePath + | None -> loadCmtFile ~config cmtFilePath (* fallback *) +``` + +### 3. File Watcher Integration + +The analysis server already has `DceCommand.ml`. We can extend it to a service: + +```ocaml +(* DceService.ml *) + +type t = { + config: Reanalyze.DceConfig.t; + collection: cmt_file_result Reactive_file_collection.t; + mutable last_result: Reanalyze.AnalysisResult.t option; +} + +let create ~project_root = + let config = Reanalyze.DceConfig.current () in + let cmt_paths = Reanalyze.collectCmtFilePaths ~cmtRoot:None in + let collection = Reactive_file_collection.create + ~process:(process_cmt_for_config ~config) + in + List.iter (Reactive_file_collection.add collection) cmt_paths; + { config; collection; last_result = None } + +let on_file_change t events = + Reactive_file_collection.apply t.collection events; + (* Invalidate cached result *) + t.last_result <- None + +let analyze t = + match t.last_result with + | Some result -> result (* Cached, no files changed *) + | None -> + let file_data_list = Reactive_file_collection.values t.collection in + let result = run_analysis_on_file_data ~config:t.config file_data_list in + t.last_result <- Some result; + result +``` + +### 4. Build System Integration (rewatch) + +Rewatch already watches for file changes. 
We can extend it to notify the analysis service: + +In `rewatch/src/watcher.rs`: +```rust +// After successful compilation of a module +if let Some(analysis_socket) = &state.analysis_socket { + analysis_socket.send(AnalysisEvent::Modified(cmt_path)); +} +``` + +Or via a Unix domain socket/named pipe that the analysis service listens on. + +## Dependency Setup + +Add to `analysis/dune`: +```dune +(library + (name analysis) + (libraries + ... + skip-lite.marshal_cache + skip-lite.reactive_file_collection)) +``` + +Add to `analysis.opam`: +```opam +depends: [ + ... + "skip-lite" {>= "0.1"} +] +``` + +## Conclusion + +Integrating skip-lite's reactive collections with reanalyze offers a path to **39-390x speedup** for incremental analysis. The key insight is that CMT file loading (77% of current time) can be eliminated for unchanged files, and the processed file_data can be cached. + +The implementation requires: +1. Adding skip-lite as a dependency +2. Wrapping CMT loading with Marshal_cache (immediate benefit: mmap caching) +3. Creating reactive collection for file_data (benefit: only process changed files) +4. Creating a service mode that watches for file changes (benefit: persistent state) + +The merge and solve phases (23% of time) remain unchanged initially, but could be made incremental in the future for even greater speedups. + +## Next Steps + +1. **Phase 0**: Add skip-lite as optional dependency (behind a feature flag) +2. **Phase 1**: Replace `Cmt_format.read_cmt` with `Marshal_cache` wrapper +3. **Phase 2**: Benchmark improvement from mmap caching alone +4. **Phase 3**: Implement `Reactive_file_collection` for file_data +5. **Phase 4**: Create analysis service with file watching +6. **Phase 5**: Integrate with rewatch for automatic updates + diff --git a/rewatch/src/cli.rs b/rewatch/src/cli.rs index 3b4604ce54..e0a0132773 100644 --- a/rewatch/src/cli.rs +++ b/rewatch/src/cli.rs @@ -197,9 +197,9 @@ pub struct AfterBuildArg { #[derive(Args, Debug, Clone, Copy)] pub struct CreateSourceDirsArg { - /// Create a source_dirs.json file at the root of the monorepo, needed for Reanalyze. - #[arg(short, long, default_value_t = false, num_args = 0..=1)] - pub create_sourcedirs: bool, + /// Deprecated: source_dirs.json is now always created. + #[arg(short, long, num_args = 0..=1, default_missing_value = "true", hide = true)] + pub create_sourcedirs: Option, } #[derive(Args, Debug, Clone, Copy)] @@ -488,11 +488,10 @@ impl Deref for AfterBuildArg { } } -impl Deref for CreateSourceDirsArg { - type Target = bool; - - fn deref(&self) -> &Self::Target { - &self.create_sourcedirs +impl CreateSourceDirsArg { + /// Returns true if the flag was explicitly passed on the command line. + pub fn was_explicitly_set(&self) -> bool { + self.create_sourcedirs.is_some() } } diff --git a/rewatch/src/main.rs b/rewatch/src/main.rs index 46bf248fbd..4c9ece6018 100644 --- a/rewatch/src/main.rs +++ b/rewatch/src/main.rs @@ -46,12 +46,18 @@ fn main() -> Result<()> { ); } + if build_args.create_sourcedirs.was_explicitly_set() { + log::warn!( + "`--create-sourcedirs` is deprecated: source_dirs.json is now always created. Please remove this flag from your command." 
+ ); + } + match build::build( &build_args.filter, Path::new(&build_args.folder as &str), show_progress, build_args.no_timing, - *build_args.create_sourcedirs, + true, // create_sourcedirs is now always enabled plain_output, (*build_args.warn_error).clone(), ) { @@ -76,12 +82,18 @@ fn main() -> Result<()> { ); } + if watch_args.create_sourcedirs.was_explicitly_set() { + log::warn!( + "`--create-sourcedirs` is deprecated: source_dirs.json is now always created. Please remove this flag from your command." + ); + } + match watcher::start( &watch_args.filter, show_progress, &watch_args.folder, (*watch_args.after_build).clone(), - *watch_args.create_sourcedirs, + true, // create_sourcedirs is now always enabled plain_output, (*watch_args.warn_error).clone(), ) { diff --git a/tests/analysis_tests/tests-reanalyze/deadcode-benchmark/Makefile b/tests/analysis_tests/tests-reanalyze/deadcode-benchmark/Makefile index 27b4767f0a..da6be9b027 100644 --- a/tests/analysis_tests/tests-reanalyze/deadcode-benchmark/Makefile +++ b/tests/analysis_tests/tests-reanalyze/deadcode-benchmark/Makefile @@ -35,6 +35,33 @@ time: @echo "Parallel (auto-detect cores):" @dune exec rescript-editor-analysis -- reanalyze -config -ci -timing -parallel -1 2>&1 | grep -E "Analysis reported|=== Timing|CMT processing|File loading|Result collection|Analysis:|Merging|Solving|Reporting:|Total:" +# Benchmark with CMT cache +time-cache: generate build + @echo "=== Without cache ===" + @echo "Sequential:" + @dune exec rescript-editor-analysis -- reanalyze -config -ci -timing 2>&1 | grep -E "=== Timing|CMT processing|File loading|Total:" + @echo "" + @echo "=== With CMT cache (first run - cold) ===" + @echo "Sequential:" + @dune exec rescript-editor-analysis -- reanalyze -config -ci -timing -cmt-cache 2>&1 | grep -E "=== Timing|CMT processing|File loading|Total:" + @echo "" + @echo "=== With CMT cache (second run - warm) ===" + @echo "Sequential:" + @dune exec rescript-editor-analysis -- reanalyze -config -ci -timing -cmt-cache 2>&1 | grep -E "=== Timing|CMT processing|File loading|Total:" + @echo "" + @echo "=== With CMT cache + parallel (warm) ===" + @dune exec rescript-editor-analysis -- reanalyze -config -ci -timing -cmt-cache -parallel -1 2>&1 | grep -E "=== Timing|CMT processing|File loading|Total:" + +# Benchmark reactive mode (simulates repeated analysis) +time-reactive: generate build + @echo "=== Reactive mode benchmark ===" + @echo "" + @echo "Standard (baseline):" + @dune exec rescript-editor-analysis -- reanalyze -config -ci -timing 2>&1 | grep -E "=== Timing|CMT processing|File loading|Total:" + @echo "" + @echo "Reactive mode (3 runs - first is cold, subsequent are warm):" + @dune exec rescript-editor-analysis -- reanalyze -config -ci -timing -reactive -runs 3 >/dev/null + .DEFAULT_GOAL := benchmark -.PHONY: generate build clean benchmark time +.PHONY: generate build clean benchmark time time-cache time-reactive diff --git a/tests/analysis_tests/tests-reanalyze/deadcode-benchmark/package.json b/tests/analysis_tests/tests-reanalyze/deadcode-benchmark/package.json index fc8d9b2b70..f89de2fb09 100644 --- a/tests/analysis_tests/tests-reanalyze/deadcode-benchmark/package.json +++ b/tests/analysis_tests/tests-reanalyze/deadcode-benchmark/package.json @@ -2,8 +2,8 @@ "name": "@tests/reanalyze-benchmark", "private": true, "scripts": { - "build": "rescript-legacy build", - "clean": "rescript-legacy clean" + "build": "rescript build", + "clean": "rescript clean" }, "dependencies": { "@rescript/react": 
"link:../../../dependencies/rescript-react", diff --git a/tests/analysis_tests/tests-reanalyze/deadcode/expected/deadcode.txt b/tests/analysis_tests/tests-reanalyze/deadcode/expected/deadcode.txt index f2dacf6bf8..7c0b83e43f 100644 --- a/tests/analysis_tests/tests-reanalyze/deadcode/expected/deadcode.txt +++ b/tests/analysis_tests/tests-reanalyze/deadcode/expected/deadcode.txt @@ -1805,733 +1805,1000 @@ addTypeReference DeadTypeTest.res:9:2 --> DeadTypeTest.resi:9:2 addValueReference TestDeadExn.res:1:7 --> DeadExn.res:1:0 -File References - - AutoAnnotate.res -->> - BootloaderResource.res -->> - BucklescriptAnnotations.res -->> - ComponentAsProp.res -->> React.res - CreateErrorHandler1.res -->> ErrorHandler.resi - CreateErrorHandler2.res -->> - DeadCodeImplementation.res -->> - DeadCodeInterface.res -->> - DeadExn.res -->> - DeadExn.resi -->> - DeadRT.res -->> - DeadRT.resi -->> - DeadTest.res -->> DeadValueTest.resi, DynamicallyLoadedComponent.res, ImmutableArray.resi, React.res - DeadTestBlacklist.res -->> - DeadTestWithInterface.res -->> - DeadTypeTest.res -->> - DeadTypeTest.resi -->> DeadTypeTest.res - DeadValueTest.res -->> - DeadValueTest.resi -->> DeadValueTest.res - Docstrings.res -->> - DynamicallyLoadedComponent.res -->> React.res - EmptyArray.res -->> - ErrorHandler.res -->> - ErrorHandler.resi -->> ErrorHandler.res - EverythingLiveHere.res -->> - FirstClassModules.res -->> - FirstClassModulesInterface.res -->> - FirstClassModulesInterface.resi -->> FirstClassModulesInterface.res - Hooks.res -->> ImportHookDefault.res, ImportHooks.res, React.res - IgnoreInterface.res -->> - IgnoreInterface.resi -->> - ImmutableArray.res -->> - ImmutableArray.resi -->> ImmutableArray.res - ImportHookDefault.res -->> - ImportHooks.res -->> - ImportIndex.res -->> - ImportJsValue.res -->> - ImportMyBanner.res -->> - InnerModuleTypes.res -->> - InnerModuleTypes.resi -->> - JSResource.res -->> - JsxV4.res -->> React.res - LetPrivate.res -->> - ModuleAliases.res -->> - ModuleAliases2.res -->> - ModuleExceptionBug.res -->> - NestedModules.res -->> - NestedModulesInSignature.res -->> - NestedModulesInSignature.resi -->> NestedModulesInSignature.res - Newsyntax.res -->> - Newton.res -->> - Opaque.res -->> - OptArg.res -->> - OptArg.resi -->> OptArg.res - OptionalArgsLiveDead.res -->> - Records.res -->> - References.res -->> - RepeatedLabel.res -->> - RequireCond.res -->> - Shadow.res -->> - TestDeadExn.res -->> DeadExn.res - TestEmitInnerModules.res -->> - TestFirstClassModules.res -->> - TestImmutableArray.res -->> ImmutableArray.resi - TestImport.res -->> - TestInnedModuleTypes.res -->> - TestModuleAliases.res -->> - TestOptArg.res -->> OptArg.resi - TestPromise.res -->> - ToSuppress.res -->> - TransitiveType1.res -->> - TransitiveType2.res -->> - TransitiveType3.res -->> - Tuples.res -->> - TypeParams1.res -->> - TypeParams2.res -->> - TypeParams3.res -->> - Types.res -->> - Unboxed.res -->> - Uncurried.res -->> - Unison.res -->> - UseImportJsValue.res -->> ImportJsValue.res - Variants.res -->> - VariantsWithPayload.res -->> - Dead VariantCase +AutoAnnotate.annotatedVariant.R4: 0 references () [0] - Dead VariantCase +AutoAnnotate.annotatedVariant.R2: 0 references () [0] - Dead RecordLabel +AutoAnnotate.r4.r4: 0 references () [0] - Dead RecordLabel +AutoAnnotate.r3.r3: 0 references () [0] - Dead RecordLabel +AutoAnnotate.r2.r2: 0 references () [0] - Dead RecordLabel +AutoAnnotate.record.variant: 0 references () [0] - Dead VariantCase +AutoAnnotate.variant.R: 0 references () [0] - Dead Value 
+BucklescriptAnnotations.+bar: 0 references () [1] - Dead Value +BucklescriptAnnotations.+f: 0 references () [0] - Live RecordLabel +ComponentAsProp.props.button: 1 references (_none_:1:-1) [0] - Live RecordLabel +ComponentAsProp.props.description: 1 references (_none_:1:-1) [0] - Live RecordLabel +ComponentAsProp.props.title: 1 references (_none_:1:-1) [0] - Live Value +ComponentAsProp.+make: 0 references () [0] - Live Value +CreateErrorHandler1.Error1.+notification: 1 references (ErrorHandler.resi:3:2) [0] - Live Value +CreateErrorHandler2.Error2.+notification: 1 references (ErrorHandler.resi:3:2) [0] - Live Value +DeadCodeImplementation.M.+x: 1 references (DeadCodeInterface.res:2:2) [0] - Dead Value +DeadRT.+emitModuleAccessPath: 0 references () [0] - Live VariantCase +DeadRT.moduleAccessPath.Kaboom: 1 references (DeadRT.res:11:16) [0] - Live VariantCase DeadRT.moduleAccessPath.Root: 1 references (DeadTest.res:98:16) [1] - Live VariantCase +DeadRT.moduleAccessPath.Root: 1 references (DeadRT.resi:2:2) [0] - Live VariantCase DeadRT.moduleAccessPath.Kaboom: 1 references (DeadRT.res:3:2) [0] - Dead RecordLabel +DeadTest.inlineRecord3.IR3.b: 0 references () [0] - Dead RecordLabel +DeadTest.inlineRecord3.IR3.a: 0 references () [0] - Dead VariantCase +DeadTest.inlineRecord3.IR3: 0 references () [0] - Dead RecordLabel +DeadTest.inlineRecord2.IR2.b: 0 references () [0] - Dead RecordLabel +DeadTest.inlineRecord2.IR2.a: 0 references () [0] - Dead VariantCase +DeadTest.inlineRecord2.IR2: 0 references () [0] - Dead Value +DeadTest.+_: 0 references () [0] - Live Value +DeadTest.+ira: 1 references (DeadTest.res:163:27) [0] - Live RecordLabel +DeadTest.inlineRecord.IR.e: 0 references () [0] - Dead RecordLabel +DeadTest.inlineRecord.IR.d: 0 references () [0] - Live RecordLabel +DeadTest.inlineRecord.IR.c: 1 references (DeadTest.res:163:7) [0] - Live RecordLabel +DeadTest.inlineRecord.IR.b: 1 references (DeadTest.res:163:35) [0] - Dead RecordLabel +DeadTest.inlineRecord.IR.a: 0 references () [0] - Live VariantCase +DeadTest.inlineRecord.IR: 1 references (DeadTest.res:163:20) [0] - Dead Value +DeadTest.+_: 0 references () [0] - Live Value +DeadTest.+deadIncorrect: 1 references (DeadTest.res:156:8) [0] - Dead RecordLabel +DeadTest.rc.a: 0 references () [0] - Dead Value +DeadTest.+funWithInnerVars: 0 references () [1] - Dead Value +DeadTest.+y: 0 references () [0] - Dead Value +DeadTest.+x: 0 references () [0] - Live VariantCase +DeadTest.WithInclude.t.A: 1 references (DeadTest.res:142:7) [1] - Live VariantCase +DeadTest.WithInclude.t.A: 1 references (DeadTest.res:134:11) [0] - Live Value +DeadTest.GloobLive.+globallyLive3: 0 references () [0] - Live Value +DeadTest.GloobLive.+globallyLive2: 0 references () [0] - Live Value +DeadTest.GloobLive.+globallyLive1: 0 references () [0] - Dead Value +DeadTest.+stringLengthNoSideEffects: 0 references () [0] - Dead Value +DeadTest.+theSideEffectIsLogging: 0 references () [0] - Live RecordLabel +DeadTest.props.s: 1 references (_none_:1:-1) [0] - Live Value +DeadTest.+make: 1 references (DeadTest.res:119:16) [0] - Dead Value +DeadTest.+deadRef: 0 references () [0] - Dead Value +DeadTest.+second: 0 references () [0] - Dead Value +DeadTest.+a3: 0 references () [0] - Dead Value +DeadTest.+a2: 0 references () [0] - Dead Value +DeadTest.+a1: 0 references () [0] - Dead Value +DeadTest.+zzz: 0 references () [0] - Dead Value +DeadTest.+withDefaultValue: 0 references () [0] - Dead Value +DeadTest.+bar: 0 references () [0] - Dead Value +DeadTest.+foo: 0 references () [1] - Dead 
Value +DeadTest.+cb: 0 references () [0] - Dead Value +DeadTest.+cb: 0 references () [0] - Dead Value +DeadTest.+recWithCallback: 0 references () [0] - Dead Value +DeadTest.+rec2: 0 references () [0] - Dead Value +DeadTest.+rec1: 0 references () [0] - Dead Value +DeadTest.+split_map: 0 references () [0] - Dead Value +DeadTest.+unusedRec: 0 references () [0] - Dead Value +DeadTest.MM.+valueOnlyInImplementation: 0 references () [0] - Live Value +DeadTest.MM.+x: 1 references (DeadTest.res:69:9) [1] - Live Value +DeadTest.MM.+x: 1 references (DeadTest.res:60:2) [0] - Dead Value +DeadTest.MM.+y: 0 references () [1] - Live Value +DeadTest.MM.+y: 1 references (DeadTest.res:64:6) [0] - Dead Value +DeadTest.UnderscoreInside.+_: 0 references () [0] - Dead Value +DeadTest.+_: 0 references () [0] - Dead Value +DeadTest.+_: 0 references () [0] - Live RecordLabel +DeadTest.record.yyy: 1 references (DeadTest.res:53:9) [0] - Live RecordLabel +DeadTest.record.xxx: 1 references (DeadTest.res:52:13) [0] - Dead Value +DeadTest.+_: 0 references () [0] - Dead Value +DeadTest.+_: 0 references () [0] - Live Value +DeadTest.VariantUsedOnlyInImplementation.+a: 1 references (DeadTest.res:42:17) [1] - Live Value +DeadTest.VariantUsedOnlyInImplementation.+a: 1 references (DeadTest.res:36:2) [0] - Live VariantCase +DeadTest.VariantUsedOnlyInImplementation.t.A: 1 references (DeadTest.res:39:10) [0] - Live VariantCase +DeadTest.VariantUsedOnlyInImplementation.t.A: 1 references (DeadTest.res:38:11) [0] - Dead Value +DeadTest.M.+thisSignatureItemIsDead: 0 references () [1] - Dead Value +DeadTest.M.+thisSignatureItemIsDead: 0 references () [0] - Dead Value +DeadTest.Inner.+thisIsAlsoMarkedDead: 0 references () [0] - Live Value +DeadTest.+thisIsMarkedLive: 0 references () [0] - Live Value +DeadTest.+thisIsKeptAlive: 1 references (DeadTest.res:20:4) [0] - Dead Value +DeadTest.+thisIsMarkedDead: 0 references () [0] - Live Value +DeadTest.+thisIsUsedTwice: 2 references (DeadTest.res:11:7, DeadTest.res:12:7) [0] - Live Value +DeadTest.+thisIsUsedOnce: 1 references (DeadTest.res:8:7) [0] - Live Value +DeadTest.+fortyTwoButExported: 0 references () [0] - Dead Value +DeadTest.+fortytwo: 0 references () [0] - Dead Value +DeadTestBlacklist.+x: 0 references () [0] - Dead Value +DeadTestWithInterface.Ext_buffer.+x: 0 references () [1] - Dead Value +DeadTestWithInterface.Ext_buffer.+x: 0 references () [0] - Dead VariantCase DeadTypeTest.deadType.InNeither: 0 references () [0] - Live VariantCase +DeadTypeTest.deadType.InBoth: 1 references (DeadTypeTest.res:13:8) [1] - Live VariantCase DeadTypeTest.deadType.InBoth: 2 references (DeadTest.res:45:8, DeadTypeTest.res:9:2) [0] - Live VariantCase DeadTypeTest.deadType.OnlyInInterface: 1 references (DeadTest.res:44:8) [0] - Live VariantCase +DeadTypeTest.deadType.OnlyInImplementation: 1 references (DeadTypeTest.res:12:8) [1] - Live VariantCase DeadTypeTest.deadType.OnlyInImplementation: 1 references (DeadTypeTest.res:7:2) [0] - Dead Value DeadTypeTest.+a: 0 references () [0] - Dead VariantCase DeadTypeTest.t.B: 0 references () [0] - Live VariantCase +DeadTypeTest.t.A: 1 references (DeadTypeTest.res:4:8) [1] - Live VariantCase DeadTypeTest.t.A: 1 references (DeadTypeTest.res:2:2) [0] - Live Value +Docstrings.+unitArgWithConversionU: 0 references () [0] - Live Value +Docstrings.+unitArgWithConversion: 0 references () [0] - Dead VariantCase +Docstrings.t.B: 0 references () [0] - Live VariantCase +Docstrings.t.A: 2 references (Docstrings.res:64:34, Docstrings.res:67:39) [0] - Live Value 
+Docstrings.+unitArgWithoutConversionU: 0 references () [0] - Live Value +Docstrings.+unitArgWithoutConversion: 0 references () [0] - Live Value +Docstrings.+grouped: 0 references () [0] - Live Value +Docstrings.+unnamed2U: 0 references () [0] - Live Value +Docstrings.+unnamed2: 0 references () [0] - Live Value +Docstrings.+unnamed1U: 0 references () [0] - Live Value +Docstrings.+unnamed1: 0 references () [0] - Live Value +Docstrings.+useParamU: 0 references () [0] - Live Value +Docstrings.+useParam: 0 references () [0] - Live Value +Docstrings.+treeU: 0 references () [0] - Live Value +Docstrings.+twoU: 0 references () [0] - Live Value +Docstrings.+oneU: 0 references () [0] - Live Value +Docstrings.+tree: 0 references () [0] - Live Value +Docstrings.+two: 0 references () [0] - Live Value +Docstrings.+one: 0 references () [0] - Live Value +Docstrings.+signMessage: 0 references () [0] - Live Value +Docstrings.+flat: 0 references () [0] - Live Value +EmptyArray.Z.+make: 1 references (EmptyArray.res:10:9) [0] - Dead Value +EverythingLiveHere.+z: 0 references () [0] - Dead Value +EverythingLiveHere.+y: 0 references () [0] - Dead Value +EverythingLiveHere.+x: 0 references () [0] - Live Value +FirstClassModules.+someFunctorAsFunction: 0 references () [0] - Live Value +FirstClassModules.SomeFunctor.+ww: 1 references (FirstClassModules.res:57:2) [0] - Live Value +FirstClassModules.+testConvert: 0 references () [0] - Live Value +FirstClassModules.+firstClassModule: 0 references () [0] - Live Value +FirstClassModules.M.+x: 1 references (FirstClassModules.res:2:2) [0] - Live Value +FirstClassModules.M.Z.+u: 1 references (FirstClassModules.res:37:4) [0] - Live Value +FirstClassModules.M.InnerModule3.+k3: 1 references (FirstClassModules.res:14:4) [0] - Live Value +FirstClassModules.M.InnerModule2.+k: 1 references (FirstClassModules.res:10:4) [0] - Live Value +FirstClassModules.M.+y: 1 references (FirstClassModules.res:20:2) [0] - Dead Value FirstClassModulesInterface.+r: 0 references () [0] - Dead RecordLabel FirstClassModulesInterface.record.y: 0 references () [0] - Dead RecordLabel FirstClassModulesInterface.record.x: 0 references () [0] - Live Value +Hooks.RenderPropRequiresConversion.+car: 1 references (Hooks.res:65:30) [0] - Live RecordLabel +Hooks.RenderPropRequiresConversion.props.renderVehicle: 1 references (_none_:1:-1) [0] - Live Value +Hooks.RenderPropRequiresConversion.+make: 0 references () [0] - Dead RecordLabel +Hooks.r.x: 0 references () [0] - Live Value +Hooks.+functionWithRenamedArgs: 0 references () [0] - Live Value +Hooks.NoProps.+make: 0 references () [0] - Live RecordLabel +Hooks.Inner.Inner2.props.vehicle: 1 references (_none_:1:-1) [0] - Live Value +Hooks.Inner.Inner2.+make: 0 references () [0] - Live RecordLabel +Hooks.Inner.props.vehicle: 1 references (_none_:1:-1) [0] - Live Value +Hooks.Inner.+make: 0 references () [0] - Live Value +Hooks.+default: 0 references () [0] - Live RecordLabel +Hooks.props.vehicle: 1 references (_none_:1:-1) [0] - Live Value +Hooks.+make: 1 references (Hooks.res:25:4) [0] - Live RecordLabel +Hooks.vehicle.name: 5 references (Hooks.res:10:29, Hooks.res:29:66, Hooks.res:33:68, Hooks.res:47:2, Hooks.res:47:14) [0] - Live RecordLabel +ImportIndex.props.method: 0 references () [0] - Live Value +ImportIndex.+make: 0 references () [0] - Dead Value +ImportMyBanner.+make: 0 references () [0] - Live Value +ImportMyBanner.+make: 0 references () [0] - Dead RecordLabel +ImportMyBanner.message.text: 0 references () [0] - Live VariantCase InnerModuleTypes.I.t.Foo: 
1 references (TestInnedModuleTypes.res:1:8) [1] - Live VariantCase +InnerModuleTypes.I.t.Foo: 1 references (InnerModuleTypes.resi:2:11) [0] - Live Value +JsxV4.C.+make: 1 references (JsxV4.res:7:9) [0] - Live Value +LetPrivate.+y: 0 references () [0] - Live Value +LetPrivate.local_1.+x: 1 references (LetPrivate.res:7:4) [0] - Live Value +ModuleAliases.+testInner2: 0 references () [0] - Live Value +ModuleAliases.+testInner: 0 references () [0] - Live Value +ModuleAliases.+testNested: 0 references () [0] - Dead RecordLabel +ModuleAliases.Outer2.Inner2.InnerNested.t.nested: 0 references () [0] - Dead RecordLabel +ModuleAliases.Outer.Inner.innerT.inner: 0 references () [0] - Dead Value +ModuleAliases2.+q: 0 references () [0] - Dead RecordLabel +ModuleAliases2.Outer.Inner.inner.inner: 0 references () [0] - Dead RecordLabel +ModuleAliases2.Outer.outer.outer: 0 references () [0] - Dead RecordLabel +ModuleAliases2.record.y: 0 references () [0] - Dead RecordLabel +ModuleAliases2.record.x: 0 references () [0] - Live Value +ModuleExceptionBug.+ddjdj: 1 references (ModuleExceptionBug.res:8:7) [0] - Dead Exception +ModuleExceptionBug.MyOtherException: 0 references () [0] - Dead Value +ModuleExceptionBug.Dep.+customDouble: 0 references () [0] - Live Value +NestedModules.Universe.+someString: 0 references () [0] - Dead VariantCase +NestedModules.Universe.variant.B: 0 references () [0] - Dead VariantCase +NestedModules.Universe.variant.A: 0 references () [0] - Live Value +NestedModules.Universe.Nested2.+nested2Function: 0 references () [0] - Live Value +NestedModules.Universe.Nested2.Nested3.+nested3Function: 0 references () [0] - Live Value +NestedModules.Universe.Nested2.Nested3.+nested3Value: 0 references () [0] - Dead Value +NestedModules.Universe.Nested2.Nested3.+w: 0 references () [0] - Dead Value +NestedModules.Universe.Nested2.Nested3.+z: 0 references () [0] - Dead Value +NestedModules.Universe.Nested2.Nested3.+y: 0 references () [0] - Dead Value +NestedModules.Universe.Nested2.Nested3.+x: 0 references () [0] - Dead Value +NestedModules.Universe.Nested2.+y: 0 references () [0] - Live Value +NestedModules.Universe.Nested2.+nested2Value: 0 references () [0] - Dead Value +NestedModules.Universe.Nested2.+x: 0 references () [0] - Dead Value +NestedModules.Universe.+notExported: 0 references () [0] - Live Value +NestedModules.Universe.+theAnswer: 0 references () [0] - Live Value +NestedModules.+notNested: 0 references () [0] - Live Value NestedModulesInSignature.Universe.+theAnswer: 0 references () [0] - Dead RecordLabel +Newsyntax.record2.yy: 0 references () [0] - Dead RecordLabel +Newsyntax.record2.xx: 0 references () [0] - Dead VariantCase +Newsyntax.variant.C: 0 references () [0] - Dead VariantCase +Newsyntax.variant.B: 0 references () [0] - Dead VariantCase +Newsyntax.variant.A: 0 references () [0] - Dead RecordLabel +Newsyntax.record.yyy: 0 references () [0] - Dead RecordLabel +Newsyntax.record.xxx: 0 references () [0] - Dead Value +Newsyntax.+y: 0 references () [0] - Dead Value +Newsyntax.+x: 0 references () [0] - Live Value +Newton.+result: 2 references (Newton.res:31:8, Newton.res:31:18) [0] - Live Value +Newton.+fPrimed: 1 references (Newton.res:29:4) [0] - Live Value +Newton.+f: 2 references (Newton.res:29:4, Newton.res:31:16) [0] - Live Value +Newton.+newton: 1 references (Newton.res:29:4) [2] - Live Value +Newton.+loop: 1 references (Newton.res:6:4) [1] - Live Value +Newton.+next: 1 references (Newton.res:14:10) [0] - Live Value +Newton.+previous: 2 references (Newton.res:14:10, 
Newton.res:16:8) [0] - Live Value +Newton.+iterateMore: 1 references (Newton.res:14:10) [1] - Live Value +Newton.+delta: 1 references (Newton.res:8:6) [0] - Live Value +Newton.+current: 3 references (Newton.res:8:6, Newton.res:14:10, Newton.res:15:8) [0] - Live Value +Newton.+/: 1 references (Newton.res:16:8) [0] - Live Value +Newton.+*: 2 references (Newton.res:25:4, Newton.res:27:4) [0] - Live Value +Newton.++: 1 references (Newton.res:25:4) [0] - Live Value +Newton.+-: 4 references (Newton.res:9:8, Newton.res:16:8, Newton.res:25:4, Newton.res:27:4) [0] - Live Value +Opaque.+testConvertNestedRecordFromOtherFile: 0 references () [0] - Live Value +Opaque.+noConversion: 0 references () [0] - Dead VariantCase +Opaque.opaqueFromRecords.A: 0 references () [0] - Live Value +OptionalArgsLiveDead.+liveCaller: 1 references (OptionalArgsLiveDead.res:7:8) [0] - Dead Value +OptionalArgsLiveDead.+deadCaller: 0 references () [0] - Live Value +OptionalArgsLiveDead.+formatDate: 1 references (OptionalArgsLiveDead.res:5:4) [0] - Live Value +Records.+testMyRecBsAs2: 0 references () [0] - Live Value +Records.+testMyRecBsAs: 0 references () [0] - Live RecordLabel +Records.myRecBsAs.type_: 1 references (Records.res:145:38) [0] - Live Value +Records.+testMyObj2: 0 references () [0] - Live Value +Records.+testMyObj: 0 references () [0] - Live Value +Records.+testMyRec2: 0 references () [0] - Live Value +Records.+testMyRec: 0 references () [0] - Live RecordLabel +Records.myRec.type_: 1 references (Records.res:127:30) [0] - Live Value +Records.+computeArea4: 0 references () [0] - Live Value +Records.+computeArea3: 0 references () [0] - Live Value +Records.+someBusiness2: 0 references () [0] - Live Value +Records.+findAddress2: 0 references () [0] - Live RecordLabel +Records.business2.address2: 1 references (Records.res:97:2) [0] - Dead RecordLabel +Records.business2.owner: 0 references () [0] - Dead RecordLabel +Records.business2.name: 0 references () [0] - Live Value +Records.+getPayloadRecordPlusOne: 0 references () [0] - Live Value +Records.+payloadValue: 0 references () [0] - Live Value +Records.+recordValue: 1 references (Records.res:80:4) [0] - Live Value +Records.+getPayloadRecord: 0 references () [0] - Dead RecordLabel +Records.record.w: 0 references () [0] - Live RecordLabel +Records.record.v: 1 references (Records.res:85:5) [0] - Live Value +Records.+getPayload: 0 references () [0] - Live RecordLabel +Records.payload.payload: 3 references (Records.res:65:18, Records.res:74:24, Records.res:83:31) [0] - Dead RecordLabel +Records.payload.num: 0 references () [0] - Live Value +Records.+findAllAddresses: 0 references () [0] - Live Value +Records.+someBusiness: 0 references () [0] - Live Value +Records.+findAddress: 0 references () [0] - Live Value +Records.+getOpt: 3 references (Records.res:39:4, Records.res:46:4, Records.res:96:4) [0] - Live RecordLabel +Records.business.address: 2 references (Records.res:40:2, Records.res:50:6) [0] - Live RecordLabel +Records.business.owner: 1 references (Records.res:51:6) [0] - Dead RecordLabel +Records.business.name: 0 references () [0] - Live RecordLabel +Records.person.address: 1 references (Records.res:51:42) [0] - Dead RecordLabel +Records.person.age: 0 references () [0] - Dead RecordLabel +Records.person.name: 0 references () [0] - Live Value +Records.+coord2d: 0 references () [0] - Live Value +Records.+computeArea: 0 references () [0] - Live Value +Records.+origin: 0 references () [0] - Live RecordLabel +Records.coord.z: 1 references (Records.res:14:19) [0] - Live 
RecordLabel +Records.coord.y: 1 references (Records.res:14:19) [0] - Live RecordLabel +Records.coord.x: 1 references (Records.res:14:19) [0] - Live Value +References.+preserveRefIdentity: 0 references () [0] - Live Value +References.+destroysRefIdentity: 0 references () [0] - Dead RecordLabel +References.requiresConversion.x: 0 references () [0] - Live Value +References.+set: 0 references () [0] - Live Value +References.+make: 0 references () [0] - Live Value +References.+get: 0 references () [0] - Live Value +References.R.+set: 1 references (References.res:37:4) [1] - Live Value +References.R.+set: 1 references (References.res:19:2) [0] - Live Value +References.R.+make: 1 references (References.res:34:4) [1] - Live Value +References.R.+make: 1 references (References.res:18:2) [0] - Live Value +References.R.+get: 1 references (References.res:31:4) [1] - Live Value +References.R.+get: 1 references (References.res:17:2) [0] - Live Value +References.+update: 0 references () [0] - Live Value +References.+access: 0 references () [0] - Live Value +References.+create: 0 references () [0] - Live Value +RepeatedLabel.+userData: 1 references (RepeatedLabel.res:14:7) [0] - Dead RecordLabel +RepeatedLabel.tabState.f: 0 references () [0] - Live RecordLabel +RepeatedLabel.tabState.b: 1 references (RepeatedLabel.res:12:16) [0] - Live RecordLabel +RepeatedLabel.tabState.a: 1 references (RepeatedLabel.res:12:16) [0] - Dead RecordLabel +RepeatedLabel.userData.b: 0 references () [0] - Dead RecordLabel +RepeatedLabel.userData.a: 0 references () [0] - Dead Value +Shadow.M.+test: 0 references () [0] - Live Value +Shadow.M.+test: 0 references () [0] - Live Value +Shadow.+test: 0 references () [0] - Live Value +Shadow.+test: 0 references () [0] - Live Value +TestEmitInnerModules.Outer.Medium.Inner.+y: 0 references () [0] - Live Value +TestEmitInnerModules.Inner.+y: 0 references () [0] - Live Value +TestEmitInnerModules.Inner.+x: 0 references () [0] - Live Value +TestFirstClassModules.+convertFirstClassModuleWithTypeEquations: 0 references () [0] - Live Value +TestFirstClassModules.+convertRecord: 0 references () [0] - Live Value +TestFirstClassModules.+convertInterface: 0 references () [0] - Live Value +TestFirstClassModules.+convert: 0 references () [0] - Dead Value +TestImmutableArray.+testBeltArraySet: 0 references () [0] - Dead Value +TestImmutableArray.+testBeltArrayGet: 0 references () [0] - Live Value +TestImmutableArray.+testImmutableArrayGet: 0 references () [0] - Live Value +TestImport.+defaultValue2: 0 references () [0] - Dead Value +TestImport.+make: 0 references () [0] - Live Value +TestImport.+make: 0 references () [0] - Dead RecordLabel +TestImport.message.text: 0 references () [0] - Live Value +TestImport.+defaultValue: 0 references () [0] - Live Value +TestImport.+valueStartingWithUpperCaseLetter: 0 references () [0] - Dead Value +TestImport.+innerStuffContents: 0 references () [0] - Live Value +TestImport.+innerStuffContentsAsEmptyObject: 0 references () [0] - Live Value +TestImport.+innerStuffContents: 0 references () [0] - Dead Value +TestInnedModuleTypes.+_: 0 references () [0] - Live Value +TestModuleAliases.+testInner2Expanded: 0 references () [0] - Live Value +TestModuleAliases.+testInner2: 0 references () [0] - Live Value +TestModuleAliases.+testInner1Expanded: 0 references () [0] - Live Value +TestModuleAliases.+testInner1: 0 references () [0] - Live Value +TestOptArg.+liveSuppressesOptArgs: 1 references (TestOptArg.res:16:8) [0] - Live Value +TestOptArg.+notSuppressesOptArgs: 1 
references (TestOptArg.res:11:8) [0] - Live Value +TestOptArg.+bar: 1 references (TestOptArg.res:7:7) [0] - Live Value +TestOptArg.+foo: 1 references (TestOptArg.res:5:4) [0] - Live Value +TestPromise.+convert: 0 references () [0] - Dead RecordLabel +TestPromise.toPayload.result: 0 references () [0] - Live RecordLabel +TestPromise.fromPayload.s: 1 references (TestPromise.res:14:32) [0] - Dead RecordLabel +TestPromise.fromPayload.x: 0 references () [0] - Dead Value +ToSuppress.+toSuppress: 0 references () [0] - Live Value +TransitiveType1.+convertAlias: 0 references () [0] - Live Value +TransitiveType1.+convert: 0 references () [0] - Dead Value +TransitiveType2.+convertT2: 0 references () [0] - Live Value +TransitiveType3.+convertT3: 0 references () [0] - Dead RecordLabel +TransitiveType3.t3.s: 0 references () [0] - Dead RecordLabel +TransitiveType3.t3.i: 0 references () [0] - Live Value +Tuples.+changeSecondAge: 0 references () [0] - Live Value +Tuples.+marry: 0 references () [0] - Live Value +Tuples.+getFirstName: 0 references () [0] - Live RecordLabel +Tuples.person.age: 1 references (Tuples.res:49:84) [0] - Live RecordLabel +Tuples.person.name: 1 references (Tuples.res:43:49) [0] - Live Value +Tuples.+coord2d: 0 references () [0] - Live Value +Tuples.+computeAreaNoConverters: 0 references () [0] - Live Value +Tuples.+computeAreaWithIdent: 0 references () [0] - Live Value +Tuples.+computeArea: 0 references () [0] - Live Value +Tuples.+origin: 0 references () [0] - Live Value +Tuples.+testTuple: 0 references () [0] - Dead Value +TypeParams1.+exportSomething: 0 references () [0] - Dead Value +TypeParams2.+exportSomething: 0 references () [0] - Dead RecordLabel +TypeParams2.item.id: 0 references () [0] - Live Value +TypeParams3.+test2: 0 references () [0] - Live Value +TypeParams3.+test: 0 references () [0] - Dead Value +Types.ObjectId.+x: 0 references () [0] - Live Value +Types.+optFunction: 0 references () [0] - Live Value +Types.+i64Const: 0 references () [0] - Live Value +Types.+currentTime: 0 references () [0] - Live Value +Types.+testInstantiateTypeParameter: 0 references () [0] - Dead RecordLabel +Types.someRecord.id: 0 references () [0] - Live Value +Types.+setMatch: 0 references () [0] - Live Value +Types.+testMarshalFields: 0 references () [0] - Live Value +Types.+testConvertNull: 0 references () [0] - Dead RecordLabel +Types.record.s: 0 references () [0] - Dead RecordLabel +Types.record.i: 0 references () [0] - Live Value +Types.+jsonStringify: 0 references () [0] - Live Value +Types.+jsString2T: 0 references () [0] - Live Value +Types.+jsStringT: 0 references () [0] - Dead VariantCase +Types.opaqueVariant.B: 0 references () [0] - Dead VariantCase +Types.opaqueVariant.A: 0 references () [0] - Live Value +Types.+testFunctionOnOptionsAsArgument: 0 references () [0] - Live Value +Types.+mutuallyRecursiveConverter: 0 references () [0] - Live Value +Types.+selfRecursiveConverter: 0 references () [0] - Dead RecordLabel +Types.mutuallyRecursiveB.a: 0 references () [0] - Live RecordLabel +Types.mutuallyRecursiveA.b: 1 references (Types.res:49:34) [0] - Live RecordLabel +Types.selfRecursive.self: 1 references (Types.res:42:30) [0] - Live Value +Types.+swap: 0 references () [0] - Dead VariantCase +Types.typeWithVars.B: 0 references () [0] - Dead VariantCase +Types.typeWithVars.A: 0 references () [0] - Live Value +Types.+map: 0 references () [0] - Live Value +Types.+someIntList: 0 references () [0] - Live Value +Unboxed.+r2Test: 0 references () [0] - Dead RecordLabel +Unboxed.r2.B.g: 0 
references () [0] - Dead VariantCase +Unboxed.r2.B: 0 references () [0] - Dead RecordLabel +Unboxed.r1.x: 0 references () [0] - Live Value +Unboxed.+testV1: 0 references () [0] - Dead VariantCase +Unboxed.v2.A: 0 references () [0] - Dead VariantCase +Unboxed.v1.A: 0 references () [0] - Live Value +Uncurried.+sumLblCurried: 0 references () [0] - Live Value +Uncurried.+sumCurried: 0 references () [0] - Live Value +Uncurried.+sumU2: 0 references () [0] - Live Value +Uncurried.+sumU: 0 references () [0] - Live Value +Uncurried.+callback2U: 0 references () [0] - Live Value +Uncurried.+callback2: 0 references () [0] - Live RecordLabel +Uncurried.authU.loginU: 1 references (Uncurried.res:38:25) [0] - Live RecordLabel +Uncurried.auth.login: 1 references (Uncurried.res:35:24) [0] - Live Value +Uncurried.+callback: 0 references () [0] - Live Value +Uncurried.+curried3: 0 references () [0] - Live Value +Uncurried.+uncurried3: 0 references () [0] - Live Value +Uncurried.+uncurried2: 0 references () [0] - Live Value +Uncurried.+uncurried1: 0 references () [0] - Live Value +Uncurried.+uncurried0: 0 references () [0] - Live Value +Unison.+toString: 3 references (Unison.res:37:0, Unison.res:38:0, Unison.res:39:0) [0] - Live Value +Unison.+fits: 1 references (Unison.res:26:8) [0] - Live Value +Unison.+group: 2 references (Unison.res:38:25, Unison.res:39:25) [0] - Live VariantCase +Unison.stack.Cons: 2 references (Unison.res:38:20, Unison.res:39:20) [0] - Live VariantCase +Unison.stack.Empty: 3 references (Unison.res:37:20, Unison.res:38:53, Unison.res:39:52) [0] - Live RecordLabel +Unison.t.doc: 2 references (Unison.res:23:9, Unison.res:28:9) [0] - Live RecordLabel +Unison.t.break: 1 references (Unison.res:28:9) [0] - Live VariantCase +Unison.break.Always: 1 references (Unison.res:39:38) [0] - Live VariantCase +Unison.break.Never: 1 references (Unison.res:38:38) [0] - Live VariantCase +Unison.break.IfNeed: 1 references (Unison.res:17:20) [0] - Live Value +UseImportJsValue.+useTypeImportedInOtherModule: 0 references () [0] - Live Value +UseImportJsValue.+useGetProp: 0 references () [0] - Live Value +Variants.+restResult3: 0 references () [0] - Live Value +Variants.+restResult2: 0 references () [0] - Live Value +Variants.+restResult1: 0 references () [0] - Dead VariantCase +Variants.result1.Error: 0 references () [0] - Dead VariantCase +Variants.result1.Ok: 0 references () [0] - Live Value +Variants.+polyWithOpt: 0 references () [0] - Dead VariantCase +Variants.type_.Type: 0 references () [0] - Live Value +Variants.+id2: 0 references () [0] - Live Value +Variants.+id1: 0 references () [0] - Live Value +Variants.+testConvert2to3: 0 references () [0] - Live Value +Variants.+testConvert3: 0 references () [0] - Live Value +Variants.+testConvert2: 0 references () [0] - Live Value +Variants.+fortytwoBAD: 0 references () [0] - Live Value +Variants.+fortytwoOK: 0 references () [0] - Live Value +Variants.+testConvert: 0 references () [0] - Live Value +Variants.+swap: 0 references () [0] - Live Value +Variants.+onlySunday: 0 references () [0] - Live Value +Variants.+sunday: 0 references () [0] - Live Value +Variants.+saturday: 0 references () [0] - Live Value +Variants.+monday: 0 references () [0] - Live Value +Variants.+isWeekend: 0 references () [0] - Live Value +VariantsWithPayload.+testVariant1Object: 0 references () [0] - Dead VariantCase +VariantsWithPayload.variant1Object.R: 0 references () [0] - Live Value +VariantsWithPayload.+testVariant1Int: 0 references () [0] - Dead VariantCase 
+VariantsWithPayload.variant1Int.R: 0 references () [0] - Live Value +VariantsWithPayload.+printVariantWithPayloads: 0 references () [0] - Live Value +VariantsWithPayload.+testVariantWithPayloads: 0 references () [0] - Dead VariantCase +VariantsWithPayload.variantWithPayloads.E: 0 references () [0] - Dead VariantCase +VariantsWithPayload.variantWithPayloads.D: 0 references () [0] - Dead VariantCase +VariantsWithPayload.variantWithPayloads.C: 0 references () [0] - Dead VariantCase +VariantsWithPayload.variantWithPayloads.B: 0 references () [0] - Dead VariantCase +VariantsWithPayload.variantWithPayloads.A: 0 references () [0] - Live Value +VariantsWithPayload.+testSimpleVariant: 0 references () [0] - Dead VariantCase +VariantsWithPayload.simpleVariant.C: 0 references () [0] - Dead VariantCase +VariantsWithPayload.simpleVariant.B: 0 references () [0] - Dead VariantCase +VariantsWithPayload.simpleVariant.A: 0 references () [0] - Live Value +VariantsWithPayload.+printManyPayloads: 0 references () [0] - Live Value +VariantsWithPayload.+testManyPayloads: 0 references () [0] - Live Value +VariantsWithPayload.+printVariantWithPayload: 0 references () [0] - Live Value +VariantsWithPayload.+testWithPayload: 0 references () [0] - Live RecordLabel +VariantsWithPayload.payload.y: 2 references (VariantsWithPayload.res:26:74, VariantsWithPayload.res:44:72) [0] - Live RecordLabel +VariantsWithPayload.payload.x: 2 references (VariantsWithPayload.res:26:57, VariantsWithPayload.res:44:55) [0] - Live Value +DeadExn.+eInside: 1 references (DeadExn.res:12:7) [0] - Dead Value +DeadExn.+eToplevel: 0 references () [0] - Dead Exception +DeadExn.DeadE: 0 references () [0] - Live Exception +DeadExn.Inside.Einside: 1 references (DeadExn.res:10:14) [0] - Live Exception +DeadExn.Etoplevel: 1 references (DeadExn.res:8:16) [0] - Live RecordLabel +DeadTypeTest.record.z: 0 references () [0] - Live RecordLabel +DeadTypeTest.record.y: 0 references () [0] - Live RecordLabel +DeadTypeTest.record.x: 0 references () [0] - Dead Value +DeadTypeTest.+_: 0 references () [0] - Dead Value +DeadTypeTest.+_: 0 references () [0] - Dead VariantCase +DeadTypeTest.deadType.InNeither: 0 references () [0] - Live VariantCase +DeadTypeTest.deadType.OnlyInInterface: 1 references (DeadTypeTest.resi:8:2) [0] - Dead Value +DeadTypeTest.+a: 0 references () [0] - Dead VariantCase +DeadTypeTest.t.B: 0 references () [0] - Dead Value DeadValueTest.+valueDead: 0 references () [0] - Live Value DeadValueTest.+valueAlive: 1 references (DeadTest.res:73:16) [0] - Live RecordLabel +DynamicallyLoadedComponent.props.s: 1 references (_none_:1:-1) [0] - Live Value +DynamicallyLoadedComponent.+make: 1 references (DeadTest.res:110:17) [0] - Dead Value ErrorHandler.+x: 0 references () [0] - Live Value ErrorHandler.Make.+notify: 1 references (CreateErrorHandler1.res:8:0) [0] - Dead Value +FirstClassModulesInterface.+r: 0 references () [0] - Dead RecordLabel +FirstClassModulesInterface.record.y: 0 references () [0] - Dead RecordLabel +FirstClassModulesInterface.record.x: 0 references () [0] - Dead Value ImmutableArray.+eq: 0 references () [0] - Dead Value ImmutableArray.+eqU: 0 references () [0] - Dead Value ImmutableArray.+cmp: 0 references () [0] - Dead Value ImmutableArray.+cmpU: 0 references () [0] - Dead Value ImmutableArray.+some2: 0 references () [0] - Dead Value ImmutableArray.+some2U: 0 references () [0] - Dead Value ImmutableArray.+every2: 0 references () [0] - Dead Value ImmutableArray.+every2U: 0 references () [0] - Dead Value ImmutableArray.+every: 0 
references () [0] - Dead Value ImmutableArray.+everyU: 0 references () [0] - Dead Value ImmutableArray.+some: 0 references () [0] - Dead Value ImmutableArray.+someU: 0 references () [0] - Dead Value ImmutableArray.+reduceReverse2: 0 references () [0] - Dead Value ImmutableArray.+reduceReverse2U: 0 references () [0] - Dead Value ImmutableArray.+reduceReverse: 0 references () [0] - Dead Value ImmutableArray.+reduceReverseU: 0 references () [0] - Dead Value ImmutableArray.+reduce: 0 references () [0] - Dead Value ImmutableArray.+reduceU: 0 references () [0] - Dead Value ImmutableArray.+partition: 0 references () [0] - Dead Value ImmutableArray.+partitionU: 0 references () [0] - Dead Value ImmutableArray.+mapWithIndex: 0 references () [0] - Dead Value ImmutableArray.+mapWithIndexU: 0 references () [0] - Dead Value ImmutableArray.+forEachWithIndex: 0 references () [0] - Dead Value ImmutableArray.+forEachWithIndexU: 0 references () [0] - Dead Value ImmutableArray.+keepMap: 0 references () [0] - Dead Value ImmutableArray.+keepMapU: 0 references () [0] - Dead Value ImmutableArray.+keepWithIndex: 0 references () [0] - Dead Value ImmutableArray.+keepWithIndexU: 0 references () [0] - Dead Value ImmutableArray.+map: 0 references () [0] - Dead Value ImmutableArray.+mapU: 0 references () [0] - Dead Value ImmutableArray.+forEach: 0 references () [0] - Dead Value ImmutableArray.+forEachU: 0 references () [0] - Dead Value ImmutableArray.+copy: 0 references () [0] - Dead Value ImmutableArray.+sliceToEnd: 0 references () [0] - Dead Value ImmutableArray.+slice: 0 references () [0] - Dead Value ImmutableArray.+concatMany: 0 references () [0] - Dead Value ImmutableArray.+concat: 0 references () [0] - Dead Value ImmutableArray.+unzip: 0 references () [0] - Dead Value ImmutableArray.+zipBy: 0 references () [0] - Dead Value ImmutableArray.+zipByU: 0 references () [0] - Dead Value ImmutableArray.+zip: 0 references () [0] - Dead Value ImmutableArray.+makeByAndShuffle: 0 references () [0] - Dead Value ImmutableArray.+makeByAndShuffleU: 0 references () [0] - Dead Value ImmutableArray.+makeBy: 0 references () [0] - Dead Value ImmutableArray.+makeByU: 0 references () [0] - Dead Value ImmutableArray.+rangeBy: 0 references () [0] - Dead Value ImmutableArray.+range: 0 references () [0] - Dead Value ImmutableArray.+make: 0 references () [0] - Dead Value ImmutableArray.+makeUninitializedUnsafe: 0 references () [0] - Dead Value ImmutableArray.+makeUninitialized: 0 references () [0] - Dead Value ImmutableArray.+reverse: 0 references () [0] - Dead Value ImmutableArray.+shuffle: 0 references () [0] - Dead Value ImmutableArray.+getUndefined: 0 references () [0] - Dead Value ImmutableArray.+getUnsafe: 0 references () [0] - Dead Value ImmutableArray.+getExn: 0 references () [0] - Dead Value ImmutableArray.+get: 0 references () [0] - Dead Value ImmutableArray.+size: 0 references () [0] - Dead Value ImmutableArray.+length: 0 references () [0] - Dead Value ImmutableArray.+toArray: 0 references () [0] - Live Value ImmutableArray.+fromArray: 1 references (DeadTest.res:1:15) [0] - Live Value ImmutableArray.Array.+get: 1 references (TestImmutableArray.res:2:4) [0] - Live RecordLabel +ImportHookDefault.props.renderMe: 0 references () [0] - Live RecordLabel +ImportHookDefault.props.children: 0 references () [0] - Live RecordLabel +ImportHookDefault.props.person: 0 references () [0] - Live Value +ImportHookDefault.+make: 1 references (Hooks.res:17:5) [0] - Dead RecordLabel +ImportHookDefault.person.age: 0 references () [0] - Dead RecordLabel 
+ImportHookDefault.person.name: 0 references () [0] - Live Value +ImportHooks.+foo: 0 references () [0] - Live RecordLabel +ImportHooks.props.renderMe: 0 references () [0] - Live RecordLabel +ImportHooks.props.children: 0 references () [0] - Live RecordLabel +ImportHooks.props.person: 0 references () [0] - Live Value +ImportHooks.+make: 1 references (Hooks.res:14:5) [0] - Dead RecordLabel +ImportHooks.person.age: 0 references () [0] - Dead RecordLabel +ImportHooks.person.name: 0 references () [0] - Live Value +ImportJsValue.+default: 0 references () [0] - Live Value +ImportJsValue.+polymorphic: 0 references () [0] - Live Value +ImportJsValue.+convertVariant: 0 references () [0] - Dead VariantCase +ImportJsValue.variant.S: 0 references () [0] - Dead VariantCase +ImportJsValue.variant.I: 0 references () [0] - Live Value +ImportJsValue.+returnedFromHigherOrder: 0 references () [0] - Live Value +ImportJsValue.+higherOrder: 1 references (ImportJsValue.res:64:4) [0] - Live Value +ImportJsValue.+useColor: 0 references () [0] - Live Value +ImportJsValue.+useGetAbs: 0 references () [0] - Live Value +ImportJsValue.+useGetProp: 0 references () [0] - Live Value +ImportJsValue.AbsoluteValue.+getAbs: 1 references (ImportJsValue.res:50:4) [1] - Live Value +ImportJsValue.AbsoluteValue.+getAbs: 1 references (ImportJsValue.res:40:6) [0] - Live Value +ImportJsValue.+areaValue: 0 references () [0] - Live Value +ImportJsValue.+roundedNumber: 0 references () [0] - Live Value +ImportJsValue.+returnMixedArray: 0 references () [0] - Live Value +ImportJsValue.+area: 1 references (ImportJsValue.res:30:4) [0] - Dead RecordLabel +ImportJsValue.point.y: 0 references () [0] - Dead RecordLabel +ImportJsValue.point.x: 0 references () [0] - Live Value +ImportJsValue.+round: 1 references (ImportJsValue.res:27:4) [0] - Live Value +NestedModulesInSignature.Universe.+theAnswer: 1 references (NestedModulesInSignature.resi:2:2) [0] - Live Value OptArg.+bar: 1 references (TestOptArg.res:1:7) [0] - Dead Value OptArg.+foo: 0 references () [0] - Dead Value +DeadValueTest.+tail: 0 references () [0] - Dead Value +DeadValueTest.+subList: 0 references () [0] - Dead Value +DeadValueTest.+valueOnlyInImplementation: 0 references () [0] - Dead Value +DeadValueTest.+valueDead: 0 references () [0] - Live Value +DeadValueTest.+valueAlive: 1 references (DeadValueTest.resi:1:0) [0] - Dead Value +ErrorHandler.+x: 0 references () [0] - Live Value +ErrorHandler.Make.+notify: 1 references (ErrorHandler.resi:7:2) [0] - Dead Value +ImmutableArray.+eq: 0 references () [0] - Dead Value +ImmutableArray.+eqU: 0 references () [0] - Dead Value +ImmutableArray.+cmp: 0 references () [0] - Dead Value +ImmutableArray.+cmpU: 0 references () [0] - Dead Value +ImmutableArray.+some2: 0 references () [0] - Dead Value +ImmutableArray.+some2U: 0 references () [0] - Dead Value +ImmutableArray.+every2: 0 references () [0] - Dead Value +ImmutableArray.+every2U: 0 references () [0] - Dead Value +ImmutableArray.+every: 0 references () [0] - Dead Value +ImmutableArray.+everyU: 0 references () [0] - Dead Value +ImmutableArray.+some: 0 references () [0] - Dead Value +ImmutableArray.+someU: 0 references () [0] - Dead Value +ImmutableArray.+reduceReverse2: 0 references () [0] - Dead Value +ImmutableArray.+reduceReverse2U: 0 references () [0] - Dead Value +ImmutableArray.+reduceReverse: 0 references () [0] - Dead Value +ImmutableArray.+reduceReverseU: 0 references () [0] - Dead Value +ImmutableArray.+reduce: 0 references () [0] - Dead Value +ImmutableArray.+reduceU: 0 references 
() [0] - Dead Value +ImmutableArray.+partition: 0 references () [0] - Dead Value +ImmutableArray.+partitionU: 0 references () [0] - Dead Value +ImmutableArray.+mapWithIndex: 0 references () [0] - Dead Value +ImmutableArray.+mapWithIndexU: 0 references () [0] - Dead Value +ImmutableArray.+forEachWithIndex: 0 references () [0] - Dead Value +ImmutableArray.+forEachWithIndexU: 0 references () [0] - Dead Value +ImmutableArray.+keepMap: 0 references () [0] - Dead Value +ImmutableArray.+keepMapU: 0 references () [0] - Dead Value +ImmutableArray.+keepWithIndex: 0 references () [0] - Dead Value +ImmutableArray.+keepWithIndexU: 0 references () [0] - Dead Value +ImmutableArray.+map: 0 references () [0] - Dead Value +ImmutableArray.+mapU: 0 references () [0] - Dead Value +ImmutableArray.+forEach: 0 references () [0] - Dead Value +ImmutableArray.+forEachU: 0 references () [0] - Dead Value +ImmutableArray.+copy: 0 references () [0] - Dead Value +ImmutableArray.+sliceToEnd: 0 references () [0] - Dead Value +ImmutableArray.+slice: 0 references () [0] - Dead Value +ImmutableArray.+concatMany: 0 references () [0] - Dead Value +ImmutableArray.+concat: 0 references () [0] - Dead Value +ImmutableArray.+unzip: 0 references () [0] - Dead Value +ImmutableArray.+zipBy: 0 references () [0] - Dead Value +ImmutableArray.+zipByU: 0 references () [0] - Dead Value +ImmutableArray.+zip: 0 references () [0] - Dead Value +ImmutableArray.+makeByAndShuffle: 0 references () [0] - Dead Value +ImmutableArray.+makeByAndShuffleU: 0 references () [0] - Dead Value +ImmutableArray.+makeBy: 0 references () [0] - Dead Value +ImmutableArray.+makeByU: 0 references () [0] - Dead Value +ImmutableArray.+rangeBy: 0 references () [0] - Dead Value +ImmutableArray.+range: 0 references () [0] - Dead Value +ImmutableArray.+make: 0 references () [0] - Dead Value +ImmutableArray.+makeUninitializedUnsafe: 0 references () [0] - Dead Value +ImmutableArray.+makeUninitialized: 0 references () [0] - Dead Value +ImmutableArray.+reverse: 0 references () [0] - Dead Value +ImmutableArray.+shuffle: 0 references () [0] - Dead Value +ImmutableArray.+getUndefined: 0 references () [0] - Dead Value +ImmutableArray.+getUnsafe: 0 references () [0] - Dead Value +ImmutableArray.+getExn: 0 references () [0] - Live Value +ImmutableArray.+get: 1 references (ImmutableArray.resi:6:2) [0] - Dead Value +ImmutableArray.+size: 0 references () [0] - Dead Value +ImmutableArray.+length: 0 references () [0] - Dead Value +ImmutableArray.+toArray: 0 references () [0] - Live Value +ImmutableArray.+fromArray: 1 references (ImmutableArray.resi:9:0) [0] - Live Value +OptArg.+wrapfourArgs: 2 references (OptArg.res:28:7, OptArg.res:29:7) [0] - Live Value +OptArg.+fourArgs: 1 references (OptArg.res:26:4) [0] - Live Value +OptArg.+wrapOneArg: 1 references (OptArg.res:22:7) [0] - Live Value +OptArg.+oneArg: 1 references (OptArg.res:20:4) [0] - Live Value +OptArg.+twoArgs: 1 references (OptArg.res:16:10) [0] - Live Value +OptArg.+threeArgs: 2 references (OptArg.res:11:7, OptArg.res:12:7) [0] - Live Value +OptArg.+bar: 2 references (OptArg.res:7:7, OptArg.resi:2:0) [0] - Live Value +OptArg.+foo: 1 references (OptArg.res:5:7) [0] +Forward Liveness Analysis + + Root (annotated): Value +Hooks.+default + Root (external ref): Value +FirstClassModules.M.InnerModule2.+k + Root (external ref): VariantCase DeadRT.moduleAccessPath.Root + Root (annotated): Value +NestedModules.Universe.Nested2.Nested3.+nested3Function + Root (annotated): Value +Docstrings.+tree + Root (annotated): Value 
+ImportJsValue.+areaValue + Root (annotated): Value +ImportJsValue.+useGetProp + Root (external ref): Value +CreateErrorHandler2.Error2.+notification + Root (annotated): Value +DeadTest.+fortyTwoButExported + Root (annotated): Value +Docstrings.+grouped + Root (external ref): RecordLabel +DeadTest.inlineRecord.IR.b + Root (annotated): Value +NestedModules.Universe.Nested2.+nested2Function + Root (annotated): Value +Tuples.+marry + Root (annotated): Value +Types.+i64Const + Root (external ref): VariantCase +DeadTypeTest.deadType.OnlyInImplementation + Root (annotated): Value +TestImport.+valueStartingWithUpperCaseLetter + Root (external ref): Value +OptionalArgsLiveDead.+liveCaller + Root (annotated): RecordLabel +ImportHookDefault.props.renderMe + Root (annotated): Value +TypeParams3.+test + Root (annotated): Value +Variants.+sunday + Root (annotated): Value +Docstrings.+unnamed1U + Root (annotated): Value +NestedModules.Universe.Nested2.Nested3.+nested3Value + Root (annotated): Value +DeadTest.GloobLive.+globallyLive2 + Root (annotated): Value +Hooks.+functionWithRenamedArgs + Root (external ref): RecordLabel +Unison.t.doc + Root (annotated): Value +Tuples.+computeAreaWithIdent + Root (annotated): Value +LetPrivate.+y + Root (annotated): Value +TestImport.+innerStuffContentsAsEmptyObject + Root (external ref): Value +TestOptArg.+notSuppressesOptArgs + Root (annotated): Value +Types.+testFunctionOnOptionsAsArgument + Root (annotated): Value +Docstrings.+unitArgWithoutConversionU + Root (annotated): Value +Uncurried.+sumU + Root (annotated): Value +Tuples.+getFirstName + Root (external ref): Value +Newton.+f + Root (external ref): RecordLabel +Records.record.v + Root (external ref): VariantCase +DeadTest.VariantUsedOnlyInImplementation.t.A + Root (annotated): Value +DeadTest.GloobLive.+globallyLive3 + Root (external ref): Value +Hooks.RenderPropRequiresConversion.+car + Root (external ref): RecordLabel +Records.person.address + Root (annotated): Value +Variants.+testConvert2 + Root (annotated): Value +Tuples.+coord2d + Root (external ref): Value +CreateErrorHandler1.Error1.+notification + Root (annotated): Value +Docstrings.+unnamed1 + Root (annotated): Value +Docstrings.+unitArgWithConversionU + Root (annotated): Value +TransitiveType3.+convertT3 + Root (annotated): Value +Variants.+swap + Root (annotated): Value +Shadow.+test + Root (annotated): Value +Variants.+testConvert3 + Root (annotated): Value +DeadTest.+thisIsMarkedLive + Root (annotated): Value +NestedModules.+notNested + Root (annotated): Value +Records.+computeArea + Root (annotated): Value +Docstrings.+oneU + Root (annotated): Value +ImportHooks.+foo + Root (annotated): RecordLabel +ImportIndex.props.method + Root (external ref): Value +FirstClassModules.M.Z.+u + Root (external ref): VariantCase +Docstrings.t.A + Root (annotated): Value +ImportJsValue.+convertVariant + Root (annotated): Value +VariantsWithPayload.+testVariant1Int + Root (external ref): Value +DynamicallyLoadedComponent.+make + Root (annotated): Value +Uncurried.+uncurried3 + Root (annotated): Value +UseImportJsValue.+useTypeImportedInOtherModule + Root (annotated): Value +Hooks.NoProps.+make + Root (external ref): Value +OptArg.+foo + Root (annotated): Value +Variants.+fortytwoOK + Root (external ref): Value OptArg.+bar + Root (annotated): Value +Records.+payloadValue + Root (external ref): RecordLabel +DeadTest.props.s + Root (annotated): Value +Uncurried.+callback2 + Root (annotated): Value +ImportJsValue.+higherOrder + Root (annotated): Value 
+TestEmitInnerModules.Inner.+y + Root (external ref): VariantCase InnerModuleTypes.I.t.Foo + Root (annotated): Value +Types.+selfRecursiveConverter + Root (external ref): Value +DeadTest.+thisIsUsedTwice + Root (annotated): Value +Opaque.+testConvertNestedRecordFromOtherFile + Root (annotated): Value +Hooks.Inner.Inner2.+make + Root (external ref): RecordLabel +Hooks.Inner.Inner2.props.vehicle + Root (external ref): Value +OptArg.+bar + Root (annotated): Value +TestFirstClassModules.+convertRecord + Root (external ref): VariantCase DeadTypeTest.deadType.OnlyInInterface + Root (external ref): RecordLabel +Records.myRecBsAs.type_ + Root (external ref): VariantCase +Unison.break.Never + Root (annotated): Value +Variants.+restResult3 + Root (external ref): RecordLabel +Tuples.person.name + Root (external ref): Value +FirstClassModules.M.InnerModule3.+k3 + Root (external ref): VariantCase +Unison.break.Always + Root (external ref): RecordLabel +Records.coord.x + Root (annotated): RecordLabel +DeadTypeTest.record.y + Root (annotated): Value +TestImport.+defaultValue + Root (external ref): Value +OptArg.+threeArgs + Root (annotated): Value +Types.+setMatch + Root (annotated): Value +Docstrings.+signMessage + Root (external ref): Value +DeadExn.+eInside + Root (external ref): VariantCase +DeadTest.inlineRecord.IR + Root (external ref): RecordLabel +ComponentAsProp.props.button + Root (annotated): Value +TestImport.+innerStuffContents + Root (external ref): Value +ModuleExceptionBug.+ddjdj + Root (annotated): Value +TransitiveType1.+convert + Root (annotated): Value +ImportJsValue.+polymorphic + Root (annotated): Value +ImportHooks.+make + Root (external ref): Value +DeadTest.+make + Root (annotated): Value +Records.+testMyRecBsAs + Root (external ref): Value +DeadTest.+ira + Root (external ref): Value +Unison.+toString + Root (external ref): Value +DeadTest.+deadIncorrect + Root (annotated): Value +Records.+origin + Root (annotated): Value +Variants.+onlySunday + Root (annotated): Value +TypeParams3.+test2 + Root (annotated): Value +Tuples.+origin + Root (annotated): Value +Uncurried.+sumLblCurried + Root (annotated): Value +Tuples.+computeArea + Root (annotated): Value +References.+get + Root (annotated): Value +ModuleAliases.+testNested + Root (external ref): Value +FirstClassModules.SomeFunctor.+ww + Root (external ref): VariantCase +DeadTest.WithInclude.t.A + Root (external ref): Value +Unison.+group + Root (annotated): Value +ImportJsValue.+area + Root (annotated): Value +Records.+testMyRec + Root (annotated): Value +ImportJsValue.+roundedNumber + Root (external ref): RecordLabel +RepeatedLabel.tabState.a + Root (external ref): Value ErrorHandler.Make.+notify + Root (annotated): Value +References.+make + Root (annotated): Value +Shadow.+test + Root (external ref): RecordLabel +Types.mutuallyRecursiveA.b + Root (annotated): RecordLabel +ImportHooks.props.renderMe + Root (annotated): Value +Uncurried.+callback + Root (annotated): Value +TestPromise.+convert + Root (external ref): Value +EmptyArray.Z.+make + Root (external ref): Value +Newton.+result + Root (annotated): Value +Records.+findAllAddresses + Root (annotated): Value +Variants.+id2 + Root (external ref): Value +TestOptArg.+bar + Root (external ref): RecordLabel +DeadTest.record.yyy + Root (annotated): Value +Docstrings.+unitArgWithoutConversion + Root (annotated): Value +Hooks.Inner.+make + Root (annotated): Value +Uncurried.+curried3 + Root (external ref): Value +OptArg.+twoArgs + Root (external ref): RecordLabel 
+Records.business2.address2 + Root (annotated): Value +Tuples.+testTuple + Root (annotated): Value +Records.+testMyObj2 + Root (annotated): Value +Uncurried.+uncurried1 + Root (external ref): Value +DeadTest.VariantUsedOnlyInImplementation.+a + Root (annotated): Value +ImportMyBanner.+make + Root (external ref): RecordLabel +Records.payload.payload + Root (annotated): Value +Docstrings.+one + Root (annotated): Value +ImportJsValue.+returnMixedArray + Root (annotated): Value +TestEmitInnerModules.Outer.Medium.Inner.+y + Root (annotated): Value +TestEmitInnerModules.Inner.+x + Root (external ref): Value +OptArg.+wrapOneArg + Root (external ref): RecordLabel +ComponentAsProp.props.title + Root (annotated): Value +Records.+findAddress + Root (annotated): Value +VariantsWithPayload.+printVariantWithPayload + Root (annotated): Value +Docstrings.+two + Root (annotated): Value +TestImmutableArray.+testImmutableArrayGet + Root (annotated): Value +Uncurried.+sumU2 + Root (annotated): Value +Hooks.RenderPropRequiresConversion.+make + Root (annotated): Value +LetPrivate.local_1.+x + Root (annotated): Value +TestImport.+make + Root (external ref): RecordLabel +Unison.t.break + Root (annotated): Value +ImportJsValue.+default + Root (annotated): Value +Types.+optFunction + Root (annotated): Value +Records.+getPayloadRecordPlusOne + Root (annotated): Value +Types.+swap + Root (annotated): Value +Types.+jsonStringify + Root (annotated): RecordLabel +ImportHookDefault.props.person + Root (annotated): Value +Variants.+saturday + Root (annotated): Value +Records.+findAddress2 + Root (annotated): Value +Records.+someBusiness + Root (external ref): RecordLabel +Hooks.vehicle.name + Root (external ref): RecordLabel +Uncurried.authU.loginU + Root (annotated): Value +Docstrings.+unnamed2 + Root (annotated): Value +References.+preserveRefIdentity + Root (annotated): Value +Types.+jsStringT + Root (annotated): Value +Variants.+restResult1 + Root (annotated): Value +Uncurried.+sumCurried + Root (annotated): Value +References.+set + Root (external ref): Value +DeadTest.MM.+x + Root (annotated): Value +ModuleAliases.+testInner + Root (external ref): RecordLabel +ComponentAsProp.props.description + Root (annotated): Value +Uncurried.+callback2U + Root (annotated): Value +Tuples.+changeSecondAge + Root (annotated): RecordLabel +DeadTest.inlineRecord.IR.e + Root (annotated): Value +Records.+recordValue + Root (annotated): Value +ImportHookDefault.+make + Root (annotated): Value +Types.+map + Root (annotated): Value +Types.+testInstantiateTypeParameter + Root (annotated): RecordLabel +DeadTypeTest.record.x + Root (external ref): RecordLabel +Records.myRec.type_ + Root (annotated): Value +TestOptArg.+liveSuppressesOptArgs + Root (annotated): Value +ImportJsValue.+useGetAbs + Root (annotated): Value NestedModulesInSignature.Universe.+theAnswer + Root (annotated): Value +References.+create + Root (annotated): Value +Types.+currentTime + Root (annotated): Value +Uncurried.+uncurried2 + Root (annotated): Value +Records.+someBusiness2 + Root (annotated): Value +FirstClassModules.+testConvert + Root (external ref): RecordLabel +DeadTest.inlineRecord.IR.c + Root (external ref): RecordLabel +Records.coord.z + Root (annotated): Value +Types.+someIntList + Root (annotated): Value +Types.+jsString2T + Root (annotated): Value +Records.+coord2d + Root (external ref): RecordLabel +DynamicallyLoadedComponent.props.s + Root (external ref): RecordLabel +Tuples.person.age + Root (annotated): Value +NestedModules.Universe.+someString + Root 
(annotated): Value +TestFirstClassModules.+convertInterface + Root (external ref): RecordLabel +TestPromise.fromPayload.s + Root (annotated): Value +Types.+testMarshalFields + Root (external ref): RecordLabel +VariantsWithPayload.payload.x + Root (annotated): RecordLabel +ImportHooks.props.children + Root (external ref): VariantCase +DeadTypeTest.t.A + Root (annotated): RecordLabel +DeadTypeTest.record.z + Root (annotated): Value +Docstrings.+flat + Root (annotated): Value +NestedModules.Universe.Nested2.+nested2Value + Root (annotated): Value +Records.+testMyObj + Root (external ref): VariantCase DeadTypeTest.deadType.InBoth + Root (annotated): Value +Records.+testMyRecBsAs2 + Root (annotated): Value +VariantsWithPayload.+testManyPayloads + Root (annotated): Value +FirstClassModules.+someFunctorAsFunction + Root (annotated): Value +Records.+computeArea3 + Root (annotated): Value +Variants.+fortytwoBAD + Root (external ref): Value +DeadTest.+thisIsUsedOnce + Root (annotated): Value +ImportJsValue.+returnedFromHigherOrder + Root (external ref): Value ImmutableArray.+fromArray + Root (external ref): Value +RepeatedLabel.+userData + Root (annotated): Value +Variants.+testConvert2to3 + Root (external ref): Value +OptArg.+wrapfourArgs + Root (annotated): Value +ImportJsValue.+round + Root (annotated): Value +TestModuleAliases.+testInner2 + Root (annotated): Value +VariantsWithPayload.+testSimpleVariant + Root (annotated): Value +TestFirstClassModules.+convert + Root (external ref): VariantCase +DeadRT.moduleAccessPath.Kaboom + Root (external ref): Value +DeadCodeImplementation.M.+x + Root (external ref): RecordLabel +Uncurried.auth.login + Root (annotated): Value +VariantsWithPayload.+testVariantWithPayloads + Root (annotated): Value +Variants.+restResult2 + Root (annotated): Value +Docstrings.+unitArgWithConversion + Root (annotated): Value +ImportJsValue.+useColor + Root (annotated): Value +Records.+getPayload + Root (external ref): VariantCase +Unison.break.IfNeed + Root (external ref): Value +FirstClassModules.M.+y + Root (annotated): Value +ModuleAliases.+testInner2 + Root (annotated): RecordLabel +ImportHooks.props.person + Root (external ref): Value DeadValueTest.+valueAlive + Root (external ref): RecordLabel +Hooks.Inner.props.vehicle + Root (annotated): Value +Shadow.M.+test + Root (annotated): Value +ComponentAsProp.+make + Root (annotated): Value +Records.+testMyRec2 + Root (annotated): Value +VariantsWithPayload.+printManyPayloads + Root (annotated): Value +TestFirstClassModules.+convertFirstClassModuleWithTypeEquations + Root (annotated): Value +TransitiveType1.+convertAlias + Root (external ref): Exception +DeadExn.Inside.Einside + Root (annotated): Value +TestImport.+defaultValue2 + Root (external ref): Exception +DeadExn.Etoplevel + Root (annotated): Value +Variants.+monday + Root (annotated): Value +VariantsWithPayload.+printVariantWithPayloads + Root (annotated): Value +Unboxed.+r2Test + Root (external ref): RecordLabel +Records.coord.y + Root (external ref): RecordLabel +DeadTest.record.xxx + Root (annotated): Value +FirstClassModules.+firstClassModule + Root (external ref): RecordLabel +Hooks.props.vehicle + Root (annotated): Value +Docstrings.+useParamU + Root (external ref): Value +JsxV4.C.+make + Root (external ref): RecordLabel +Types.selfRecursive.self + Root (annotated): Value +Variants.+polyWithOpt + Root (annotated): Value +References.+destroysRefIdentity + Root (annotated): Value +Docstrings.+unnamed2U + Root (external ref): Value +FirstClassModules.M.+x + Root 
(annotated): Value +Uncurried.+uncurried0 + Root (external ref): VariantCase +Unison.stack.Empty + Root (annotated): Value +Records.+computeArea4 + Root (annotated): Value +TestModuleAliases.+testInner1Expanded + Root (annotated): Value +ImportIndex.+make + Root (annotated): Value +Unboxed.+testV1 + Root (annotated): Value +NestedModules.Universe.+theAnswer + Root (annotated): Value +References.+access + Root (annotated): Value +TestModuleAliases.+testInner2Expanded + Root (annotated): Value +Variants.+isWeekend + Root (annotated): Value +Variants.+testConvert + Root (annotated): Value +Variants.+id1 + Root (annotated): Value +VariantsWithPayload.+testVariant1Object + Root (annotated): Value +References.+update + Root (annotated): Value +Docstrings.+treeU + Root (annotated): Value +Opaque.+noConversion + Root (external ref): RecordLabel +RepeatedLabel.tabState.b + Root (annotated): Value +Docstrings.+twoU + Root (annotated): Value +DeadTest.GloobLive.+globallyLive1 + Root (external ref): RecordLabel +Records.business.owner + Root (external ref): VariantCase +Unison.stack.Cons + Root (external ref): VariantCase +DeadTypeTest.deadType.InBoth + Root (external ref): RecordLabel +Records.business.address + Root (external ref): RecordLabel +VariantsWithPayload.payload.y + Root (annotated): RecordLabel +ImportHookDefault.props.children + Root (annotated): Value +TestModuleAliases.+testInner1 + Root (annotated): Value +VariantsWithPayload.+testWithPayload + Root (annotated): Value +Types.+testConvertNull + Root (annotated): Value +Records.+getPayloadRecord + Root (annotated): Value +Tuples.+computeAreaNoConverters + Root (annotated): Value +Docstrings.+useParam + Root (annotated): Value +Types.+mutuallyRecursiveConverter + Root (annotated): Value +UseImportJsValue.+useGetProp + Root (external ref): RecordLabel +Hooks.RenderPropRequiresConversion.props.renderVehicle + + 300 roots found + + Propagate: +Hooks.+default -> +Hooks.+make + Propagate: DeadRT.moduleAccessPath.Root -> +DeadRT.moduleAccessPath.Root + Propagate: +DeadTypeTest.deadType.OnlyInImplementation -> DeadTypeTest.deadType.OnlyInImplementation + Propagate: +OptionalArgsLiveDead.+liveCaller -> +OptionalArgsLiveDead.+formatDate + Propagate: +Newton.+f -> +Newton.+- + Propagate: +Newton.+f -> +Newton.++ + Propagate: +Newton.+f -> +Newton.+* + Propagate: +DeadTest.VariantUsedOnlyInImplementation.t.A -> +DeadTest.VariantUsedOnlyInImplementation.t.A + Propagate: +DeadTest.+thisIsMarkedLive -> +DeadTest.+thisIsKeptAlive + Propagate: InnerModuleTypes.I.t.Foo -> +InnerModuleTypes.I.t.Foo + Propagate: DeadTypeTest.deadType.OnlyInInterface -> +DeadTypeTest.deadType.OnlyInInterface + Propagate: +Unison.+toString -> +Unison.+fits + Propagate: +References.+get -> +References.R.+get + Propagate: +DeadTest.WithInclude.t.A -> +DeadTest.WithInclude.t.A + Propagate: ErrorHandler.Make.+notify -> +ErrorHandler.Make.+notify + Propagate: +References.+make -> +References.R.+make + Propagate: +Newton.+result -> +Newton.+newton + Propagate: +Newton.+result -> +Newton.+fPrimed + Propagate: +Records.+findAllAddresses -> +Records.+getOpt + Propagate: +TestOptArg.+bar -> +TestOptArg.+foo + Propagate: +DeadTest.VariantUsedOnlyInImplementation.+a -> +DeadTest.VariantUsedOnlyInImplementation.+a + Propagate: +OptArg.+wrapOneArg -> +OptArg.+oneArg + Propagate: +TestImmutableArray.+testImmutableArrayGet -> ImmutableArray.Array.+get + Propagate: +References.+set -> +References.R.+set + Propagate: +DeadTest.MM.+x -> +DeadTest.MM.+x + Propagate: +ImportJsValue.+useGetAbs -> 
+ImportJsValue.AbsoluteValue.+getAbs + Propagate: NestedModulesInSignature.Universe.+theAnswer -> +NestedModulesInSignature.Universe.+theAnswer + Propagate: +DeadTypeTest.t.A -> DeadTypeTest.t.A + Propagate: ImmutableArray.+fromArray -> +ImmutableArray.+fromArray + Propagate: +OptArg.+wrapfourArgs -> +OptArg.+fourArgs + Propagate: +DeadRT.moduleAccessPath.Kaboom -> DeadRT.moduleAccessPath.Kaboom + Propagate: DeadValueTest.+valueAlive -> +DeadValueTest.+valueAlive + Propagate: +References.R.+get -> +References.R.+get + Propagate: +References.R.+make -> +References.R.+make + Propagate: +Newton.+newton -> +Newton.+/ + Propagate: +Newton.+newton -> +Newton.+current + Propagate: +Newton.+newton -> +Newton.+iterateMore + Propagate: +Newton.+newton -> +Newton.+delta + Propagate: +Newton.+newton -> +Newton.+loop + Propagate: +Newton.+newton -> +Newton.+previous + Propagate: +Newton.+newton -> +Newton.+next + Propagate: ImmutableArray.Array.+get -> +ImmutableArray.+get + Propagate: +References.R.+set -> +References.R.+set + Propagate: +DeadTest.MM.+x -> +DeadTest.MM.+y + Propagate: +ImportJsValue.AbsoluteValue.+getAbs -> +ImportJsValue.AbsoluteValue.+getAbs + + 45 declarations marked live via propagation + + Dead VariantCase +AutoAnnotate.variant.R + Dead RecordLabel +AutoAnnotate.record.variant + Dead RecordLabel +AutoAnnotate.r2.r2 + Dead RecordLabel +AutoAnnotate.r3.r3 + Dead RecordLabel +AutoAnnotate.r4.r4 + Dead VariantCase +AutoAnnotate.annotatedVariant.R2 + Dead VariantCase +AutoAnnotate.annotatedVariant.R4 + Dead Value +BucklescriptAnnotations.+bar + Dead Value +BucklescriptAnnotations.+f + Live (annotated) Value +ComponentAsProp.+make + Live (external ref) RecordLabel +ComponentAsProp.props.title + Live (external ref) RecordLabel +ComponentAsProp.props.description + Live (external ref) RecordLabel +ComponentAsProp.props.button + Live (external ref) Value +CreateErrorHandler1.Error1.+notification + Live (external ref) Value +CreateErrorHandler2.Error2.+notification + Live (external ref) Value +DeadCodeImplementation.M.+x + Live (external ref) Exception +DeadExn.Etoplevel + Live (external ref) Exception +DeadExn.Inside.Einside + Dead Exception +DeadExn.DeadE + Dead Value +DeadExn.+eToplevel + Live (external ref) Value +DeadExn.+eInside + Live (propagated) VariantCase +DeadRT.moduleAccessPath.Root + Live (external ref) VariantCase +DeadRT.moduleAccessPath.Kaboom + Dead Value +DeadRT.+emitModuleAccessPath + Live (external ref) VariantCase DeadRT.moduleAccessPath.Root + Live (propagated) VariantCase DeadRT.moduleAccessPath.Kaboom + Dead Value +DeadTest.+fortytwo + Live (annotated) Value +DeadTest.+fortyTwoButExported + Live (external ref) Value +DeadTest.+thisIsUsedOnce + Live (external ref) Value +DeadTest.+thisIsUsedTwice + Dead Value +DeadTest.+thisIsMarkedDead + Live (propagated) Value +DeadTest.+thisIsKeptAlive + Live (annotated) Value +DeadTest.+thisIsMarkedLive + Dead Value +DeadTest.Inner.+thisIsAlsoMarkedDead + Dead Value +DeadTest.M.+thisSignatureItemIsDead + Dead Value +DeadTest.M.+thisSignatureItemIsDead + Live (propagated) VariantCase +DeadTest.VariantUsedOnlyInImplementation.t.A + Live (external ref) Value +DeadTest.VariantUsedOnlyInImplementation.+a + Live (external ref) VariantCase +DeadTest.VariantUsedOnlyInImplementation.t.A + Live (propagated) Value +DeadTest.VariantUsedOnlyInImplementation.+a + Dead Value +DeadTest.+_ + Dead Value +DeadTest.+_ + Live (external ref) RecordLabel +DeadTest.record.xxx + Live (external ref) RecordLabel +DeadTest.record.yyy + Dead Value 
+DeadTest.+_ + Dead Value +DeadTest.+_ + Dead Value +DeadTest.UnderscoreInside.+_ + Live (external ref) Value +DeadTest.MM.+x + Dead Value +DeadTest.MM.+y + Live (propagated) Value +DeadTest.MM.+y + Live (propagated) Value +DeadTest.MM.+x + Dead Value +DeadTest.MM.+valueOnlyInImplementation + Dead Value +DeadTest.+unusedRec + Dead Value +DeadTest.+split_map + Dead Value +DeadTest.+rec1 + Dead Value +DeadTest.+rec2 + Dead Value +DeadTest.+recWithCallback + Dead Value +DeadTest.+cb + Dead Value +DeadTest.+foo + Dead Value +DeadTest.+cb + Dead Value +DeadTest.+bar + Dead Value +DeadTest.+withDefaultValue + Dead Value +DeadTest.+zzz + Dead Value +DeadTest.+a1 + Dead Value +DeadTest.+a2 + Dead Value +DeadTest.+a3 + Dead Value +DeadTest.+second + Dead Value +DeadTest.+deadRef + Live (external ref) Value +DeadTest.+make + Live (external ref) RecordLabel +DeadTest.props.s + Dead Value +DeadTest.+theSideEffectIsLogging + Dead Value +DeadTest.+stringLengthNoSideEffects + Live (annotated) Value +DeadTest.GloobLive.+globallyLive1 + Live (annotated) Value +DeadTest.GloobLive.+globallyLive2 + Live (annotated) Value +DeadTest.GloobLive.+globallyLive3 + Live (external ref) VariantCase +DeadTest.WithInclude.t.A + Live (propagated) VariantCase +DeadTest.WithInclude.t.A + Dead Value +DeadTest.+funWithInnerVars + Dead Value +DeadTest.+x + Dead Value +DeadTest.+y + Dead RecordLabel +DeadTest.rc.a + Live (external ref) Value +DeadTest.+deadIncorrect + Dead Value +DeadTest.+_ + Live (external ref) VariantCase +DeadTest.inlineRecord.IR + Dead RecordLabel +DeadTest.inlineRecord.IR.a + Live (external ref) RecordLabel +DeadTest.inlineRecord.IR.b + Live (external ref) RecordLabel +DeadTest.inlineRecord.IR.c + Dead RecordLabel +DeadTest.inlineRecord.IR.d + Live (annotated) RecordLabel +DeadTest.inlineRecord.IR.e + Live (external ref) Value +DeadTest.+ira + Dead Value +DeadTest.+_ + Dead VariantCase +DeadTest.inlineRecord2.IR2 + Dead RecordLabel +DeadTest.inlineRecord2.IR2.a + Dead RecordLabel +DeadTest.inlineRecord2.IR2.b + Dead VariantCase +DeadTest.inlineRecord3.IR3 + Dead RecordLabel +DeadTest.inlineRecord3.IR3.a + Dead RecordLabel +DeadTest.inlineRecord3.IR3.b + Dead Value +DeadTestBlacklist.+x + Dead Value +DeadTestWithInterface.Ext_buffer.+x + Dead Value +DeadTestWithInterface.Ext_buffer.+x + Live (external ref) VariantCase +DeadTypeTest.t.A + Dead VariantCase +DeadTypeTest.t.B + Dead Value +DeadTypeTest.+a + Live (external ref) VariantCase +DeadTypeTest.deadType.OnlyInImplementation + Live (propagated) VariantCase +DeadTypeTest.deadType.OnlyInInterface + Live (external ref) VariantCase +DeadTypeTest.deadType.InBoth + Dead VariantCase +DeadTypeTest.deadType.InNeither + Dead Value +DeadTypeTest.+_ + Dead Value +DeadTypeTest.+_ + Live (annotated) RecordLabel +DeadTypeTest.record.x + Live (annotated) RecordLabel +DeadTypeTest.record.y + Live (annotated) RecordLabel +DeadTypeTest.record.z + Live (propagated) VariantCase DeadTypeTest.t.A + Dead VariantCase DeadTypeTest.t.B + Dead Value DeadTypeTest.+a + Live (propagated) VariantCase DeadTypeTest.deadType.OnlyInImplementation + Live (external ref) VariantCase DeadTypeTest.deadType.OnlyInInterface + Live (external ref) VariantCase DeadTypeTest.deadType.InBoth + Dead VariantCase DeadTypeTest.deadType.InNeither + Live (propagated) Value +DeadValueTest.+valueAlive + Dead Value +DeadValueTest.+valueDead + Dead Value +DeadValueTest.+valueOnlyInImplementation + Dead Value +DeadValueTest.+subList + Dead Value +DeadValueTest.+tail + Live (external ref) Value 
DeadValueTest.+valueAlive + Dead Value DeadValueTest.+valueDead + Live (annotated) Value +Docstrings.+flat + Live (annotated) Value +Docstrings.+signMessage + Live (annotated) Value +Docstrings.+one + Live (annotated) Value +Docstrings.+two + Live (annotated) Value +Docstrings.+tree + Live (annotated) Value +Docstrings.+oneU + Live (annotated) Value +Docstrings.+twoU + Live (annotated) Value +Docstrings.+treeU + Live (annotated) Value +Docstrings.+useParam + Live (annotated) Value +Docstrings.+useParamU + Live (annotated) Value +Docstrings.+unnamed1 + Live (annotated) Value +Docstrings.+unnamed1U + Live (annotated) Value +Docstrings.+unnamed2 + Live (annotated) Value +Docstrings.+unnamed2U + Live (annotated) Value +Docstrings.+grouped + Live (annotated) Value +Docstrings.+unitArgWithoutConversion + Live (annotated) Value +Docstrings.+unitArgWithoutConversionU + Live (external ref) VariantCase +Docstrings.t.A + Dead VariantCase +Docstrings.t.B + Live (annotated) Value +Docstrings.+unitArgWithConversion + Live (annotated) Value +Docstrings.+unitArgWithConversionU + Live (external ref) Value +DynamicallyLoadedComponent.+make + Live (external ref) RecordLabel +DynamicallyLoadedComponent.props.s + Live (external ref) Value +EmptyArray.Z.+make + Live (propagated) Value +ErrorHandler.Make.+notify + Dead Value +ErrorHandler.+x + Live (external ref) Value ErrorHandler.Make.+notify + Dead Value ErrorHandler.+x + Dead Value +EverythingLiveHere.+x + Dead Value +EverythingLiveHere.+y + Dead Value +EverythingLiveHere.+z + Live (external ref) Value +FirstClassModules.M.+y + Live (external ref) Value +FirstClassModules.M.InnerModule2.+k + Live (external ref) Value +FirstClassModules.M.InnerModule3.+k3 + Live (external ref) Value +FirstClassModules.M.Z.+u + Live (external ref) Value +FirstClassModules.M.+x + Live (annotated) Value +FirstClassModules.+firstClassModule + Live (annotated) Value +FirstClassModules.+testConvert + Live (external ref) Value +FirstClassModules.SomeFunctor.+ww + Live (annotated) Value +FirstClassModules.+someFunctorAsFunction + Dead RecordLabel +FirstClassModulesInterface.record.x + Dead RecordLabel +FirstClassModulesInterface.record.y + Dead Value +FirstClassModulesInterface.+r + Dead RecordLabel FirstClassModulesInterface.record.x + Dead RecordLabel FirstClassModulesInterface.record.y + Dead Value FirstClassModulesInterface.+r + Live (external ref) RecordLabel +Hooks.vehicle.name + Live (propagated) Value +Hooks.+make + Live (external ref) RecordLabel +Hooks.props.vehicle + Live (annotated) Value +Hooks.+default + Live (annotated) Value +Hooks.Inner.+make + Live (external ref) RecordLabel +Hooks.Inner.props.vehicle + Live (annotated) Value +Hooks.Inner.Inner2.+make + Live (external ref) RecordLabel +Hooks.Inner.Inner2.props.vehicle + Live (annotated) Value +Hooks.NoProps.+make + Live (annotated) Value +Hooks.+functionWithRenamedArgs + Dead RecordLabel +Hooks.r.x + Live (annotated) Value +Hooks.RenderPropRequiresConversion.+make + Live (external ref) RecordLabel +Hooks.RenderPropRequiresConversion.props.renderVehicle + Live (external ref) Value +Hooks.RenderPropRequiresConversion.+car + Live (propagated) Value +ImmutableArray.+fromArray + Dead Value +ImmutableArray.+toArray + Dead Value +ImmutableArray.+length + Dead Value +ImmutableArray.+size + Live (propagated) Value +ImmutableArray.+get + Dead Value +ImmutableArray.+getExn + Dead Value +ImmutableArray.+getUnsafe + Dead Value +ImmutableArray.+getUndefined + Dead Value +ImmutableArray.+shuffle + Dead Value 
+ImmutableArray.+reverse + Dead Value +ImmutableArray.+makeUninitialized + Dead Value +ImmutableArray.+makeUninitializedUnsafe + Dead Value +ImmutableArray.+make + Dead Value +ImmutableArray.+range + Dead Value +ImmutableArray.+rangeBy + Dead Value +ImmutableArray.+makeByU + Dead Value +ImmutableArray.+makeBy + Dead Value +ImmutableArray.+makeByAndShuffleU + Dead Value +ImmutableArray.+makeByAndShuffle + Dead Value +ImmutableArray.+zip + Dead Value +ImmutableArray.+zipByU + Dead Value +ImmutableArray.+zipBy + Dead Value +ImmutableArray.+unzip + Dead Value +ImmutableArray.+concat + Dead Value +ImmutableArray.+concatMany + Dead Value +ImmutableArray.+slice + Dead Value +ImmutableArray.+sliceToEnd + Dead Value +ImmutableArray.+copy + Dead Value +ImmutableArray.+forEachU + Dead Value +ImmutableArray.+forEach + Dead Value +ImmutableArray.+mapU + Dead Value +ImmutableArray.+map + Dead Value +ImmutableArray.+keepWithIndexU + Dead Value +ImmutableArray.+keepWithIndex + Dead Value +ImmutableArray.+keepMapU + Dead Value +ImmutableArray.+keepMap + Dead Value +ImmutableArray.+forEachWithIndexU + Dead Value +ImmutableArray.+forEachWithIndex + Dead Value +ImmutableArray.+mapWithIndexU + Dead Value +ImmutableArray.+mapWithIndex + Dead Value +ImmutableArray.+partitionU + Dead Value +ImmutableArray.+partition + Dead Value +ImmutableArray.+reduceU + Dead Value +ImmutableArray.+reduce + Dead Value +ImmutableArray.+reduceReverseU + Dead Value +ImmutableArray.+reduceReverse + Dead Value +ImmutableArray.+reduceReverse2U + Dead Value +ImmutableArray.+reduceReverse2 + Dead Value +ImmutableArray.+someU + Dead Value +ImmutableArray.+some + Dead Value +ImmutableArray.+everyU + Dead Value +ImmutableArray.+every + Dead Value +ImmutableArray.+every2U + Dead Value +ImmutableArray.+every2 + Dead Value +ImmutableArray.+some2U + Dead Value +ImmutableArray.+some2 + Dead Value +ImmutableArray.+cmpU + Dead Value +ImmutableArray.+cmp + Dead Value +ImmutableArray.+eqU + Dead Value +ImmutableArray.+eq + Live (propagated) Value ImmutableArray.Array.+get + Live (external ref) Value ImmutableArray.+fromArray + Dead Value ImmutableArray.+toArray + Dead Value ImmutableArray.+length + Dead Value ImmutableArray.+size + Dead Value ImmutableArray.+get + Dead Value ImmutableArray.+getExn + Dead Value ImmutableArray.+getUnsafe + Dead Value ImmutableArray.+getUndefined + Dead Value ImmutableArray.+shuffle + Dead Value ImmutableArray.+reverse + Dead Value ImmutableArray.+makeUninitialized + Dead Value ImmutableArray.+makeUninitializedUnsafe + Dead Value ImmutableArray.+make + Dead Value ImmutableArray.+range + Dead Value ImmutableArray.+rangeBy + Dead Value ImmutableArray.+makeByU + Dead Value ImmutableArray.+makeBy + Dead Value ImmutableArray.+makeByAndShuffleU + Dead Value ImmutableArray.+makeByAndShuffle + Dead Value ImmutableArray.+zip + Dead Value ImmutableArray.+zipByU + Dead Value ImmutableArray.+zipBy + Dead Value ImmutableArray.+unzip + Dead Value ImmutableArray.+concat + Dead Value ImmutableArray.+concatMany + Dead Value ImmutableArray.+slice + Dead Value ImmutableArray.+sliceToEnd + Dead Value ImmutableArray.+copy + Dead Value ImmutableArray.+forEachU + Dead Value ImmutableArray.+forEach + Dead Value ImmutableArray.+mapU + Dead Value ImmutableArray.+map + Dead Value ImmutableArray.+keepWithIndexU + Dead Value ImmutableArray.+keepWithIndex + Dead Value ImmutableArray.+keepMapU + Dead Value ImmutableArray.+keepMap + Dead Value ImmutableArray.+forEachWithIndexU + Dead Value ImmutableArray.+forEachWithIndex + Dead Value 
ImmutableArray.+mapWithIndexU + Dead Value ImmutableArray.+mapWithIndex + Dead Value ImmutableArray.+partitionU + Dead Value ImmutableArray.+partition + Dead Value ImmutableArray.+reduceU + Dead Value ImmutableArray.+reduce + Dead Value ImmutableArray.+reduceReverseU + Dead Value ImmutableArray.+reduceReverse + Dead Value ImmutableArray.+reduceReverse2U + Dead Value ImmutableArray.+reduceReverse2 + Dead Value ImmutableArray.+someU + Dead Value ImmutableArray.+some + Dead Value ImmutableArray.+everyU + Dead Value ImmutableArray.+every + Dead Value ImmutableArray.+every2U + Dead Value ImmutableArray.+every2 + Dead Value ImmutableArray.+some2U + Dead Value ImmutableArray.+some2 + Dead Value ImmutableArray.+cmpU + Dead Value ImmutableArray.+cmp + Dead Value ImmutableArray.+eqU + Dead Value ImmutableArray.+eq + Dead RecordLabel +ImportHookDefault.person.name + Dead RecordLabel +ImportHookDefault.person.age + Live (annotated) Value +ImportHookDefault.+make + Live (annotated) RecordLabel +ImportHookDefault.props.person + Live (annotated) RecordLabel +ImportHookDefault.props.children + Live (annotated) RecordLabel +ImportHookDefault.props.renderMe + Dead RecordLabel +ImportHooks.person.name + Dead RecordLabel +ImportHooks.person.age + Live (annotated) Value +ImportHooks.+make + Live (annotated) RecordLabel +ImportHooks.props.person + Live (annotated) RecordLabel +ImportHooks.props.children + Live (annotated) RecordLabel +ImportHooks.props.renderMe + Live (annotated) Value +ImportHooks.+foo + Live (annotated) Value +ImportIndex.+make + Live (annotated) RecordLabel +ImportIndex.props.method + Live (annotated) Value +ImportJsValue.+round + Dead RecordLabel +ImportJsValue.point.x + Dead RecordLabel +ImportJsValue.point.y + Live (annotated) Value +ImportJsValue.+area + Live (annotated) Value +ImportJsValue.+returnMixedArray + Live (annotated) Value +ImportJsValue.+roundedNumber + Live (annotated) Value +ImportJsValue.+areaValue + Live (propagated) Value +ImportJsValue.AbsoluteValue.+getAbs + Live (propagated) Value +ImportJsValue.AbsoluteValue.+getAbs + Live (annotated) Value +ImportJsValue.+useGetProp + Live (annotated) Value +ImportJsValue.+useGetAbs + Live (annotated) Value +ImportJsValue.+useColor + Live (annotated) Value +ImportJsValue.+higherOrder + Live (annotated) Value +ImportJsValue.+returnedFromHigherOrder + Dead VariantCase +ImportJsValue.variant.I + Dead VariantCase +ImportJsValue.variant.S + Live (annotated) Value +ImportJsValue.+convertVariant + Live (annotated) Value +ImportJsValue.+polymorphic + Live (annotated) Value +ImportJsValue.+default + Dead RecordLabel +ImportMyBanner.message.text + Live (annotated) Value +ImportMyBanner.+make + Dead Value +ImportMyBanner.+make + Live (propagated) VariantCase +InnerModuleTypes.I.t.Foo + Live (external ref) VariantCase InnerModuleTypes.I.t.Foo + Live (external ref) Value +JsxV4.C.+make + Live (annotated) Value +LetPrivate.local_1.+x + Live (annotated) Value +LetPrivate.+y + Dead RecordLabel +ModuleAliases.Outer.Inner.innerT.inner + Dead RecordLabel +ModuleAliases.Outer2.Inner2.InnerNested.t.nested + Live (annotated) Value +ModuleAliases.+testNested + Live (annotated) Value +ModuleAliases.+testInner + Live (annotated) Value +ModuleAliases.+testInner2 + Dead RecordLabel +ModuleAliases2.record.x + Dead RecordLabel +ModuleAliases2.record.y + Dead RecordLabel +ModuleAliases2.Outer.outer.outer + Dead RecordLabel +ModuleAliases2.Outer.Inner.inner.inner + Dead Value +ModuleAliases2.+q + Dead Value +ModuleExceptionBug.Dep.+customDouble + Dead Exception 
+ModuleExceptionBug.MyOtherException + Live (external ref) Value +ModuleExceptionBug.+ddjdj + Live (annotated) Value +NestedModules.+notNested + Live (annotated) Value +NestedModules.Universe.+theAnswer + Dead Value +NestedModules.Universe.+notExported + Dead Value +NestedModules.Universe.Nested2.+x + Live (annotated) Value +NestedModules.Universe.Nested2.+nested2Value + Dead Value +NestedModules.Universe.Nested2.+y + Dead Value +NestedModules.Universe.Nested2.Nested3.+x + Dead Value +NestedModules.Universe.Nested2.Nested3.+y + Dead Value +NestedModules.Universe.Nested2.Nested3.+z + Dead Value +NestedModules.Universe.Nested2.Nested3.+w + Live (annotated) Value +NestedModules.Universe.Nested2.Nested3.+nested3Value + Live (annotated) Value +NestedModules.Universe.Nested2.Nested3.+nested3Function + Live (annotated) Value +NestedModules.Universe.Nested2.+nested2Function + Dead VariantCase +NestedModules.Universe.variant.A + Dead VariantCase +NestedModules.Universe.variant.B + Live (annotated) Value +NestedModules.Universe.+someString + Live (propagated) Value +NestedModulesInSignature.Universe.+theAnswer + Live (annotated) Value NestedModulesInSignature.Universe.+theAnswer + Dead Value +Newsyntax.+x + Dead Value +Newsyntax.+y + Dead RecordLabel +Newsyntax.record.xxx + Dead RecordLabel +Newsyntax.record.yyy + Dead VariantCase +Newsyntax.variant.A + Dead VariantCase +Newsyntax.variant.B + Dead VariantCase +Newsyntax.variant.C + Dead RecordLabel +Newsyntax.record2.xx + Dead RecordLabel +Newsyntax.record2.yy + Live (propagated) Value +Newton.+- + Live (propagated) Value +Newton.++ + Live (propagated) Value +Newton.+* + Live (propagated) Value +Newton.+/ + Live (propagated) Value +Newton.+newton + Live (propagated) Value +Newton.+current + Live (propagated) Value +Newton.+iterateMore + Live (propagated) Value +Newton.+delta + Live (propagated) Value +Newton.+loop + Live (propagated) Value +Newton.+previous + Live (propagated) Value +Newton.+next + Live (external ref) Value +Newton.+f + Live (propagated) Value +Newton.+fPrimed + Live (external ref) Value +Newton.+result + Dead VariantCase +Opaque.opaqueFromRecords.A + Live (annotated) Value +Opaque.+noConversion + Live (annotated) Value +Opaque.+testConvertNestedRecordFromOtherFile + Live (external ref) Value +OptArg.+foo + Live (external ref) Value +OptArg.+bar + Live (external ref) Value +OptArg.+threeArgs + Live (external ref) Value +OptArg.+twoArgs + Live (propagated) Value +OptArg.+oneArg + Live (external ref) Value +OptArg.+wrapOneArg + Live (propagated) Value +OptArg.+fourArgs + Live (external ref) Value +OptArg.+wrapfourArgs + Dead Value OptArg.+foo + Live (external ref) Value OptArg.+bar + Live (propagated) Value +OptionalArgsLiveDead.+formatDate + Dead Value +OptionalArgsLiveDead.+deadCaller + Live (external ref) Value +OptionalArgsLiveDead.+liveCaller + Live (external ref) RecordLabel +Records.coord.x + Live (external ref) RecordLabel +Records.coord.y + Live (external ref) RecordLabel +Records.coord.z + Live (annotated) Value +Records.+origin + Live (annotated) Value +Records.+computeArea + Live (annotated) Value +Records.+coord2d + Dead RecordLabel +Records.person.name + Dead RecordLabel +Records.person.age + Live (external ref) RecordLabel +Records.person.address + Dead RecordLabel +Records.business.name + Live (external ref) RecordLabel +Records.business.owner + Live (external ref) RecordLabel +Records.business.address + Live (propagated) Value +Records.+getOpt + Live (annotated) Value +Records.+findAddress + Live (annotated) Value 
+Records.+someBusiness + Live (annotated) Value +Records.+findAllAddresses + Dead RecordLabel +Records.payload.num + Live (external ref) RecordLabel +Records.payload.payload + Live (annotated) Value +Records.+getPayload + Live (external ref) RecordLabel +Records.record.v + Dead RecordLabel +Records.record.w + Live (annotated) Value +Records.+getPayloadRecord + Live (annotated) Value +Records.+recordValue + Live (annotated) Value +Records.+payloadValue + Live (annotated) Value +Records.+getPayloadRecordPlusOne + Dead RecordLabel +Records.business2.name + Dead RecordLabel +Records.business2.owner + Live (external ref) RecordLabel +Records.business2.address2 + Live (annotated) Value +Records.+findAddress2 + Live (annotated) Value +Records.+someBusiness2 + Live (annotated) Value +Records.+computeArea3 + Live (annotated) Value +Records.+computeArea4 + Live (external ref) RecordLabel +Records.myRec.type_ + Live (annotated) Value +Records.+testMyRec + Live (annotated) Value +Records.+testMyRec2 + Live (annotated) Value +Records.+testMyObj + Live (annotated) Value +Records.+testMyObj2 + Live (external ref) RecordLabel +Records.myRecBsAs.type_ + Live (annotated) Value +Records.+testMyRecBsAs + Live (annotated) Value +Records.+testMyRecBsAs2 + Live (annotated) Value +References.+create + Live (annotated) Value +References.+access + Live (annotated) Value +References.+update + Live (propagated) Value +References.R.+get + Live (propagated) Value +References.R.+make + Live (propagated) Value +References.R.+set + Live (propagated) Value +References.R.+get + Live (propagated) Value +References.R.+make + Live (propagated) Value +References.R.+set + Live (annotated) Value +References.+get + Live (annotated) Value +References.+make + Live (annotated) Value +References.+set + Dead RecordLabel +References.requiresConversion.x + Live (annotated) Value +References.+destroysRefIdentity + Live (annotated) Value +References.+preserveRefIdentity + Dead RecordLabel +RepeatedLabel.userData.a + Dead RecordLabel +RepeatedLabel.userData.b + Live (external ref) RecordLabel +RepeatedLabel.tabState.a + Live (external ref) RecordLabel +RepeatedLabel.tabState.b + Dead RecordLabel +RepeatedLabel.tabState.f + Live (external ref) Value +RepeatedLabel.+userData + Live (annotated) Value +Shadow.+test + Live (annotated) Value +Shadow.+test + Live (annotated) Value +Shadow.M.+test + Dead Value +Shadow.M.+test + Live (annotated) Value +TestEmitInnerModules.Inner.+x + Live (annotated) Value +TestEmitInnerModules.Inner.+y + Live (annotated) Value +TestEmitInnerModules.Outer.Medium.Inner.+y + Live (annotated) Value +TestFirstClassModules.+convert + Live (annotated) Value +TestFirstClassModules.+convertInterface + Live (annotated) Value +TestFirstClassModules.+convertRecord + Live (annotated) Value +TestFirstClassModules.+convertFirstClassModuleWithTypeEquations + Live (annotated) Value +TestImmutableArray.+testImmutableArrayGet + Dead Value +TestImmutableArray.+testBeltArrayGet + Dead Value +TestImmutableArray.+testBeltArraySet + Live (annotated) Value +TestImport.+innerStuffContents + Live (annotated) Value +TestImport.+innerStuffContentsAsEmptyObject + Dead Value +TestImport.+innerStuffContents + Live (annotated) Value +TestImport.+valueStartingWithUpperCaseLetter + Live (annotated) Value +TestImport.+defaultValue + Dead RecordLabel +TestImport.message.text + Live (annotated) Value +TestImport.+make + Dead Value +TestImport.+make + Live (annotated) Value +TestImport.+defaultValue2 + Dead Value +TestInnedModuleTypes.+_ + Live 
(annotated) Value +TestModuleAliases.+testInner1 + Live (annotated) Value +TestModuleAliases.+testInner1Expanded + Live (annotated) Value +TestModuleAliases.+testInner2 + Live (annotated) Value +TestModuleAliases.+testInner2Expanded + Live (propagated) Value +TestOptArg.+foo + Live (external ref) Value +TestOptArg.+bar + Live (external ref) Value +TestOptArg.+notSuppressesOptArgs + Live (annotated) Value +TestOptArg.+liveSuppressesOptArgs + Dead RecordLabel +TestPromise.fromPayload.x + Live (external ref) RecordLabel +TestPromise.fromPayload.s + Dead RecordLabel +TestPromise.toPayload.result + Live (annotated) Value +TestPromise.+convert + Dead Value +ToSuppress.+toSuppress + Live (annotated) Value +TransitiveType1.+convert + Live (annotated) Value +TransitiveType1.+convertAlias + Dead Value +TransitiveType2.+convertT2 + Dead RecordLabel +TransitiveType3.t3.i + Dead RecordLabel +TransitiveType3.t3.s + Live (annotated) Value +TransitiveType3.+convertT3 + Live (annotated) Value +Tuples.+testTuple + Live (annotated) Value +Tuples.+origin + Live (annotated) Value +Tuples.+computeArea + Live (annotated) Value +Tuples.+computeAreaWithIdent + Live (annotated) Value +Tuples.+computeAreaNoConverters + Live (annotated) Value +Tuples.+coord2d + Live (external ref) RecordLabel +Tuples.person.name + Live (external ref) RecordLabel +Tuples.person.age + Live (annotated) Value +Tuples.+getFirstName + Live (annotated) Value +Tuples.+marry + Live (annotated) Value +Tuples.+changeSecondAge + Dead Value +TypeParams1.+exportSomething + Dead RecordLabel +TypeParams2.item.id + Dead Value +TypeParams2.+exportSomething + Live (annotated) Value +TypeParams3.+test + Live (annotated) Value +TypeParams3.+test2 + Live (annotated) Value +Types.+someIntList + Live (annotated) Value +Types.+map + Dead VariantCase +Types.typeWithVars.A + Dead VariantCase +Types.typeWithVars.B + Live (annotated) Value +Types.+swap + Live (external ref) RecordLabel +Types.selfRecursive.self + Live (external ref) RecordLabel +Types.mutuallyRecursiveA.b + Dead RecordLabel +Types.mutuallyRecursiveB.a + Live (annotated) Value +Types.+selfRecursiveConverter + Live (annotated) Value +Types.+mutuallyRecursiveConverter + Live (annotated) Value +Types.+testFunctionOnOptionsAsArgument + Dead VariantCase +Types.opaqueVariant.A + Dead VariantCase +Types.opaqueVariant.B + Live (annotated) Value +Types.+jsStringT + Live (annotated) Value +Types.+jsString2T + Live (annotated) Value +Types.+jsonStringify + Dead RecordLabel +Types.record.i + Dead RecordLabel +Types.record.s + Live (annotated) Value +Types.+testConvertNull + Live (annotated) Value +Types.+testMarshalFields + Live (annotated) Value +Types.+setMatch + Dead RecordLabel +Types.someRecord.id + Live (annotated) Value +Types.+testInstantiateTypeParameter + Live (annotated) Value +Types.+currentTime + Live (annotated) Value +Types.+i64Const + Live (annotated) Value +Types.+optFunction + Dead Value +Types.ObjectId.+x + Dead VariantCase +Unboxed.v1.A + Dead VariantCase +Unboxed.v2.A + Live (annotated) Value +Unboxed.+testV1 + Dead RecordLabel +Unboxed.r1.x + Dead VariantCase +Unboxed.r2.B + Dead RecordLabel +Unboxed.r2.B.g + Live (annotated) Value +Unboxed.+r2Test + Live (annotated) Value +Uncurried.+uncurried0 + Live (annotated) Value +Uncurried.+uncurried1 + Live (annotated) Value +Uncurried.+uncurried2 + Live (annotated) Value +Uncurried.+uncurried3 + Live (annotated) Value +Uncurried.+curried3 + Live (annotated) Value +Uncurried.+callback + Live (external ref) RecordLabel +Uncurried.auth.login + 
Live (external ref) RecordLabel +Uncurried.authU.loginU + Live (annotated) Value +Uncurried.+callback2 + Live (annotated) Value +Uncurried.+callback2U + Live (annotated) Value +Uncurried.+sumU + Live (annotated) Value +Uncurried.+sumU2 + Live (annotated) Value +Uncurried.+sumCurried + Live (annotated) Value +Uncurried.+sumLblCurried + Live (external ref) VariantCase +Unison.break.IfNeed + Live (external ref) VariantCase +Unison.break.Never + Live (external ref) VariantCase +Unison.break.Always + Live (external ref) RecordLabel +Unison.t.break + Live (external ref) RecordLabel +Unison.t.doc + Live (external ref) VariantCase +Unison.stack.Empty + Live (external ref) VariantCase +Unison.stack.Cons + Live (external ref) Value +Unison.+group + Live (propagated) Value +Unison.+fits + Live (external ref) Value +Unison.+toString + Live (annotated) Value +UseImportJsValue.+useGetProp + Live (annotated) Value +UseImportJsValue.+useTypeImportedInOtherModule + Live (annotated) Value +Variants.+isWeekend + Live (annotated) Value +Variants.+monday + Live (annotated) Value +Variants.+saturday + Live (annotated) Value +Variants.+sunday + Live (annotated) Value +Variants.+onlySunday + Live (annotated) Value +Variants.+swap + Live (annotated) Value +Variants.+testConvert + Live (annotated) Value +Variants.+fortytwoOK + Live (annotated) Value +Variants.+fortytwoBAD + Live (annotated) Value +Variants.+testConvert2 + Live (annotated) Value +Variants.+testConvert3 + Live (annotated) Value +Variants.+testConvert2to3 + Live (annotated) Value +Variants.+id1 + Live (annotated) Value +Variants.+id2 + Dead VariantCase +Variants.type_.Type + Live (annotated) Value +Variants.+polyWithOpt + Dead VariantCase +Variants.result1.Ok + Dead VariantCase +Variants.result1.Error + Live (annotated) Value +Variants.+restResult1 + Live (annotated) Value +Variants.+restResult2 + Live (annotated) Value +Variants.+restResult3 + Live (external ref) RecordLabel +VariantsWithPayload.payload.x + Live (external ref) RecordLabel +VariantsWithPayload.payload.y + Live (annotated) Value +VariantsWithPayload.+testWithPayload + Live (annotated) Value +VariantsWithPayload.+printVariantWithPayload + Live (annotated) Value +VariantsWithPayload.+testManyPayloads + Live (annotated) Value +VariantsWithPayload.+printManyPayloads + Dead VariantCase +VariantsWithPayload.simpleVariant.A + Dead VariantCase +VariantsWithPayload.simpleVariant.B + Dead VariantCase +VariantsWithPayload.simpleVariant.C + Live (annotated) Value +VariantsWithPayload.+testSimpleVariant + Dead VariantCase +VariantsWithPayload.variantWithPayloads.A + Dead VariantCase +VariantsWithPayload.variantWithPayloads.B + Dead VariantCase +VariantsWithPayload.variantWithPayloads.C + Dead VariantCase +VariantsWithPayload.variantWithPayloads.D + Dead VariantCase +VariantsWithPayload.variantWithPayloads.E + Live (annotated) Value +VariantsWithPayload.+testVariantWithPayloads + Live (annotated) Value +VariantsWithPayload.+printVariantWithPayloads + Dead VariantCase +VariantsWithPayload.variant1Int.R + Live (annotated) Value +VariantsWithPayload.+testVariant1Int + Dead VariantCase +VariantsWithPayload.variant1Object.R + Live (annotated) Value +VariantsWithPayload.+testVariant1Object Incorrect Dead Annotation DeadTest.res:153:1-28 diff --git a/tests/analysis_tests/tests-reanalyze/deadcode/package.json b/tests/analysis_tests/tests-reanalyze/deadcode/package.json index 2c294ed392..fdcd84d9ee 100644 --- a/tests/analysis_tests/tests-reanalyze/deadcode/package.json +++ 
b/tests/analysis_tests/tests-reanalyze/deadcode/package.json
@@ -2,8 +2,8 @@
   "name": "@tests/reanalyze-deadcode",
   "private": true,
   "scripts": {
-    "build": "rescript-legacy build",
-    "clean": "rescript-legacy clean"
+    "build": "rescript build",
+    "clean": "rescript clean"
   },
   "dependencies": {
     "@rescript/react": "link:../../../dependencies/rescript-react",
diff --git a/tests/analysis_tests/tests-reanalyze/deadcode/rescript.json b/tests/analysis_tests/tests-reanalyze/deadcode/rescript.json
index 467c603309..942f2d957f 100644
--- a/tests/analysis_tests/tests-reanalyze/deadcode/rescript.json
+++ b/tests/analysis_tests/tests-reanalyze/deadcode/rescript.json
@@ -5,7 +5,7 @@
     "unsuppress": [],
     "transitive": true
   },
-  "name": "sample-typescript-app",
+  "name": "@tests/reanalyze-deadcode",
   "jsx": { "version": 4 },
   "dependencies": ["@rescript/react"],
   "sources": [
diff --git a/tests/dependencies/rescript-react/package.json b/tests/dependencies/rescript-react/package.json
index cd0136bdd3..eaf7dd05a3 100644
--- a/tests/dependencies/rescript-react/package.json
+++ b/tests/dependencies/rescript-react/package.json
@@ -1,4 +1,29 @@
 {
-  "name": "@tests/rescript-react",
-  "private": true
+  "name": "@rescript/react",
+  "private": true,
+  "version": "12.0.2",
+  "homepage": "https://rescript-lang.org",
+  "bugs": "https://github.com/rescript-lang/rescript/issues",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/rescript-lang/rescript.git"
+  },
+  "author": {
+    "name": "Hongbo Zhang",
+    "email": "bobzhang1988@gmail.com"
+  },
+  "maintainers": [
+    "Christoph Knittel (https://github.com/cknitt)",
+    "Cristiano Calcagno (https://github.com/cristianoc)",
+    "Dmitry Zakharov (https://github.com/DZakh)",
+    "Florian Hammerschmidt (https://github.com/fhammerschmidt)",
+    "Gabriel Nordeborn (https://github.com/zth)",
+    "Hyeseong Kim (https://github.com/cometkim)",
+    "Jaap Frolich (https://github.com/jfrolich)",
+    "Matthias Le Brun (https://github.com/bloodyowl)",
+    "Patrick Ecker (https://github.com/ryyppy)",
+    "Paul Tsnobiladzé (https://github.com/tsnobip)",
+    "Woonki Moon (https://github.com/mununki)"
+  ],
+  "preferUnplugged": true
 }
diff --git a/yarn.lock b/yarn.lock
index 573bc9b2a6..3db8c9bc0e 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -452,6 +452,12 @@ __metadata:
   languageName: node
   linkType: hard
 
+"@rescript/react@workspace:tests/dependencies/rescript-react":
+  version: 0.0.0-use.local
+  resolution: "@rescript/react@workspace:tests/dependencies/rescript-react"
+  languageName: unknown
+  linkType: soft
+
 "@rescript/runtime@workspace:packages/@rescript/runtime":
   version: 0.0.0-use.local
   resolution: "@rescript/runtime@workspace:packages/@rescript/runtime"
@@ -724,12 +730,6 @@
   languageName: unknown
   linkType: soft
 
-"@tests/rescript-react@workspace:tests/dependencies/rescript-react":
-  version: 0.0.0-use.local
-  resolution: "@tests/rescript-react@workspace:tests/dependencies/rescript-react"
-  languageName: unknown
-  linkType: soft
-
 "@tests/tools@workspace:tests/tools_tests":
   version: 0.0.0-use.local
   resolution: "@tests/tools@workspace:tests/tools_tests"