package lrgrep


Source file enumeration.ml

open Utils
open Misc
open Fix.Indexing
open Info

(* Computation of free failures:

   Given a set of stacks and an initial reduction, find each lookahead that
   ends up being rejected by at least one stack of the set.
*)

(* A failure node associates with a goto transition the set of lookaheads that
   can be rejected (directly or indirectly). *)
type ('g, 'lrc) kernel = {
  (* The lrc index represents the set of stacks ending in this state. *)
  lrc: 'lrc index;

  (* The nonterminal labelling the goto transition to follow from these stacks,
     or none if the stacks should be considered directly. *)
  nto: 'g nonterminal opt index;

  (* Lookahead *)
  lookahead: 'g terminal indexset;
}

let kernel lrc ?goto lookahead =
  let nto = Opt.(Option.fold ~none ~some goto) in
  {lrc; nto; lookahead}
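
(* Hypothetical usage sketch (not in the original source): a kernel either
   describes a set of stacks considered directly, or the state reached after
   following a goto transition labelled by a nonterminal.  Assuming
   [lrc0 : 'lrc index], [nt : 'g nonterminal index] and
   [la : 'g terminal indexset] are in scope:

     let direct       = kernel lrc0 la            (* nto = Opt.none    *)
     and through_goto = kernel lrc0 ~goto:nt la   (* nto = Opt.some nt *)
*)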

type ('lrc, 'n) edge = {
  path: 'lrc index list;
  source: 'n index;
  target: 'n index;
}

type ('g, 'lrc, 'a, 'n) _graph = {
  ker : ('n, ('g, 'lrc) kernel) vector;
  fwd : ('n, ('lrc, 'n) edge list) vector;
  bkd : ('n, ('lrc, 'n) edge list) vector;
  entries: 'a array;
}

type ('g, 'lrc, 'a) graph =
    Graph : ('g, 'lrc, 'a, 'n) _graph -> ('g, 'lrc, 'a) graph

(* Staged and cached lazy computation for constructing the graph of failure nodes:
   1. [let finder = free_failures grammar stacks rcs]
      lazily constructs the graph
   2. [finder lrcs nt depth] is the list of failure nodes reachable by following
      a goto transition labelled [nt], [depth] states deep in the stacks
      described by [lrcs].
*)

let rec fold_expand expand env f acc = function
  | [] -> acc
  | [x] -> f env acc x
  | x :: xs ->
    let acc = f env acc x in
    let env = expand env in
    fold_expand expand env f acc xs
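
(* Illustration (hypothetical, documentation only): [fold_expand] folds [f]
   over a list while threading an environment that is expanded between
   consecutive elements, but not after the last one.  For instance:

     fold_expand succ 0 (fun env acc x -> (env, x) :: acc) [] [10; 20; 30]

   evaluates to [(2, 30); (1, 20); (0, 10)]: element 10 is visited with
   environment 0, 20 with 1, 30 with 2, and [succ] runs only twice.
   In [make_graph] below, the environment is the current list of paths and
   [expand_paths] grows them by one predecessor per reduction depth. *)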

let make_graph (type g lrc a)
    (grammar : g grammar)
    (rcs : (g lr1, g Redgraph.reduction_closure) vector)
    (stacks : (g, lrc) Automata.stacks)
    (entries : ((g, lrc) kernel * a) list)
  =
  let open IndexBuffer in
  let module Nodes = Gen.Make() in
  let nodes = Nodes.get_generator () in
  let fwd = Dyn.make [] in
  let table = Hashtbl.create 500 in
  let rec synthesize (node : Nodes.n index) ker =
    let paths, tgt =
      let lr1 = stacks.label ker.lrc in
      match Opt.prj ker.nto with
      | None ->
        IndexSet.fold
          (fun lrc acc -> (lrc, [lrc]) :: acc)
          (stacks.prev ker.lrc) [],
        lr1
      | Some nt ->
        ([ker.lrc, []], Transition.find_goto_target grammar lr1 nt)
    in
    let rc = rcs.:(tgt) in
    let explore_paths paths acc nts =
      IndexMap.fold begin fun nt lookahead' acc ->
        let lookahead = IndexSet.inter ker.lookahead lookahead' in
        if IndexSet.is_not_empty lookahead then
          List.fold_left begin fun acc (lrc', path) ->
            let target = get_node {lrc = lrc'; nto = Opt.some nt; lookahead} in
            {source=node; target; path} :: acc
          end acc paths
        else
          acc
      end nts acc
    in
    let expand_paths paths =
      List.fold_left begin fun acc (lrc0, path) ->
        IndexSet.fold
          (fun lrc acc -> (lrc, lrc :: path) :: acc)
          (stacks.prev lrc0) acc
      end [] paths
    in
    Dyn.set fwd node (fold_expand expand_paths paths explore_paths [] rc.reductions)

  and get_node ker =
    assert (IndexSet.is_not_empty ker.lookahead);
    match Hashtbl.find_opt table ker with
    | Some node -> node
    | None ->
      let node = Gen.add nodes ker in
      Hashtbl.add table ker node;
      synthesize node ker;
      node
  in
  let entry_nodes = List.map (fun (ker, _) -> Gen.add nodes ker) entries in
  List.iter (fun i -> synthesize i (Gen.get nodes i)) entry_nodes;
  let ker = Gen.freeze nodes in
  let fwd = Dyn.contents fwd Nodes.n in
  let bkd = Vector.make Nodes.n [] in
  Vector.iter (List.iter (fun edge -> bkd.@(edge.target) <- List.cons edge)) fwd;
  Graph {entries=Array.of_list (List.map snd entries); ker; fwd; bkd}
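
(* Consumption sketch (hypothetical, documentation only): the node index ['n]
   is packed existentially, so callers must open the [Graph] constructor
   before accessing the vectors, e.g.

     match make_graph grammar rcs stacks entries with
     | Graph gr -> Vector.iter (List.iter visit_edge) gr.fwd

   where [visit_edge] stands for any per-edge processing; [gr.fwd] and
   [gr.bkd] hold the outgoing and incoming edges of each failure node. *)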

let get_lr1_state grammar (stacks : _ Automata.stacks) ker =
  let lr1 = stacks.label ker.lrc in
  match Opt.prj ker.nto with
  | None -> lr1
  | Some nt -> Transition.find_goto_target grammar lr1 nt

let get_lr0_state grammar (stacks : _ Automata.stacks) ker =
  Lr1.to_lr0 grammar (get_lr1_state grammar stacks ker)

(* Analysis of reachable lookaheads *)

let get_failing grammar stacks rcs ker =
  rcs.:(get_lr1_state grammar stacks ker).Redgraph.failing

type ('g, 'lrc, 'a, 'n) failing_sentence = {
  first: 'n index;
  pattern: 'g lr0 index;
  edges: ('lrc, 'n) edge list;
  failing: 'g terminal indexset;
  entry: 'a;
}

let make_failing_sentence gr (first, pattern, edges, failing) =
  let index = Index.to_int (List.fold_left (fun _ edge -> edge.source) first edges) in
  assert (index < Array.length gr.entries);
  {first; pattern; edges; failing; entry = gr.entries.(index)}

let cover_with_maximal_patterns grammar rcs stacks gr =
  let results = ref [] in
  let todo = ref (
      List.init (Array.length gr.entries)
        (fun i -> (Index.of_int (Vector.length gr.ker) i, [], IndexSet.empty))
    )
  in
  let covered = Vector.make (Lr0.cardinal grammar) IndexSet.empty in
  let emit node path failing =
    let ker = gr.ker.:(node) in
    let rec visit_stacks candidate = function
      | {Redgraph. subs = []} ->
        let lr0 = Lr1.to_lr0 grammar candidate in
        let covered0 = covered.:(lr0) in
        let covered' = IndexSet.union failing covered0 in
        if covered' != covered0 then (
          covered.:(lr0) <- covered';
          push results (make_failing_sentence gr (node, lr0, path, failing))
        )
      | {Redgraph.subs} ->
        List.iter (fun (stack, _la, subs) ->
            visit_stacks (List.hd stack) subs
          ) subs
    in
    let lr1 = get_lr1_state grammar stacks ker in
    visit_stacks lr1 rcs.:(lr1).Redgraph.stacks
  in
  let marked = Boolvector.make (Vector.length gr.ker) false in
  let visited = Vector.make (Vector.length gr.ker) IndexSet.empty in

  let propagate (node, path, failing) =
    let ker = gr.ker.:(node) in
    let failing = IndexSet.union (get_failing grammar stacks rcs ker) failing in
    if not (Boolvector.test marked node) || not (IndexSet.equal visited.:(node) failing) then (
      Boolvector.set marked node;
      visited.@(node) <- IndexSet.union failing;
      match gr.fwd.:(node) with
      | [] ->
        if IndexSet.is_not_empty failing then
          emit node path failing
      | edges ->
        List.iter begin fun edge ->
          push todo (edge.target, edge :: path, failing)
        end edges
    )
  in
  fixpoint ~propagate todo;
  !results

type ('g, 'a) dispenser = {
  fallible0: ('g lr0, 'g terminal indexset) vector;
  mutable next : 'a Seq.t;
}

let mark_covered disp lr0 la =
  disp.fallible0.@(lr0) <- IndexSet.union la

let mark_sentence_covered g stacks gr disp {first; edges; failing; _} =
  let mark node = mark_covered disp (get_lr0_state g stacks gr.ker.:(node)) failing in
  mark first;
  List.iter (fun edge -> mark edge.source) edges

let next disp =
  let result, next = match disp.next () with
    | Seq.Nil -> (None, Seq.empty)
    | Seq.Cons (x, xs) -> (Some x, xs)
  in
  disp.next <- next;
  result

let to_seq disp = disp.next
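
(* Hypothetical consumption loop (documentation only): the dispenser built by
   [cover_all] below yields one failing sentence at a time through [next], or
   the whole remaining sequence through [to_seq]:

     let rec drain disp =
       match next disp with
       | None -> ()
       | Some sentence -> handle sentence; drain disp

   where [handle] stands for whatever the caller does with each
   [failing_sentence]. *)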

let cover_all (type g n) grammar rcs stacks (gr : (g, _, _, n) _graph) =
  let disp = {
    fallible0 = Vector.make (Lr0.cardinal grammar) IndexSet.empty;
    next = Seq.empty;
  } in
  disp.next <- begin fun () ->
    let n = Vector.length gr.ker in
    let visited_prefix = Boolvector.make n false in
    let visited_suffix = Boolvector.make n false in
    let fallible = Vector.make n IndexSet.empty in
    let prefixes = Vector.make n [] in
    let suffixes = Vector.make n [] in
    let shortest_prefix = Vector.make n [] in
    let shortest_suffix = Vector.make n [] in
    let todo = ref [] in
    let propagate (dir, node, path, failing) =
      let ker = gr.ker.:(node) in
      let failing = IndexSet.union failing (get_failing grammar stacks rcs ker) in
      (* Remember the shortest paths, find where to mark the visit status *)
      let visited =
        match dir with
        | `Prefix ->
          if list_is_empty shortest_prefix.:(node) then
            shortest_prefix.:(node) <- path;
          visited_prefix
        | `Suffix ->
          if list_is_empty shortest_suffix.:(node) then
            shortest_suffix.:(node) <- path;
          visited_suffix
      in
      let fallible' = IndexSet.union failing fallible.:(node) in
      if not (Boolvector.test visited node) || fallible' != fallible.:(node) then (
        Boolvector.set visited node;
        fallible.:(node) <- fallible';
        let lr0 = get_lr0_state grammar stacks ker in
        let fallible0' = IndexSet.diff failing disp.fallible0.:(lr0) in
        (* Save path if it is the first to cover some lookahead *)
        if IndexSet.is_not_empty fallible0' then (
          disp.fallible0.@(lr0) <- IndexSet.union fallible0';
          let sentences = match dir with
            | `Prefix -> prefixes
            | `Suffix -> suffixes
          in
          sentences.@(node) <- List.cons (path, failing);
        );
        (* Extend path with successors *)
        let prj, list = match dir with
          | `Prefix -> ((fun edge -> edge.source), gr.bkd.:(node))
          | `Suffix -> ((fun edge -> edge.target), gr.fwd.:(node))
        in
        List.iter (fun edge -> push todo (dir, prj edge, edge :: path, failing)) list
      );
    in
    Index.iter n begin fun node ->
      if list_is_empty gr.bkd.:(node) then
        propagate (`Suffix, node, [], IndexSet.empty)
      else if list_is_empty gr.fwd.:(node) then
        propagate (`Prefix, node, [], IndexSet.empty)
    end;
    fixpoint ~propagate todo;
    Index.iter n begin fun node ->
      if not (list_is_empty gr.bkd.:(node)) then
        assert (not (list_is_empty shortest_suffix.:(node)));
      if not (list_is_empty gr.fwd.:(node)) then
        assert (not (list_is_empty shortest_prefix.:(node)));
    end;
    Index.init_seq n begin fun node () ->
      let output (prefix, pfail) (suffix, sfail) =
        let failing = IndexSet.union pfail sfail in
        let sentence = List.rev_append prefix suffix in
        let first = match sentence with
          | [] -> node
          | x :: _ -> x.target
        in
        (first, sentence, failing)
      in
      let sprefix = shortest_prefix.:(node) in
      let ssuffix = shortest_suffix.:(node) in
      let output_prefixes prefixes =
        List.to_seq prefixes
        |> Seq.map (fun prefix' -> output prefix' (ssuffix, IndexSet.empty))
      in
      let output_suffixes suffixes =
        List.to_seq suffixes
        |> Seq.map (fun suffix' -> output (sprefix, IndexSet.empty) suffix')
      in
      match List.rev prefixes.:(node), suffixes.:(node) with
      | prefix0 :: prefixes, suffix0 :: suffixes ->
        Seq.Cons (output prefix0 suffix0,
                  Seq.append
                    (output_prefixes prefixes)
                    (output_suffixes suffixes))
      | prefixes, suffixes ->
        Seq.append (output_prefixes prefixes) (output_suffixes suffixes) ()
    end
    |> Seq.concat
    |> Seq.filter_map (fun (node, edges, failing) ->
        let productive = ref false in
        let check node =
          let lr0 = get_lr0_state grammar stacks gr.ker.:(node) in
          let fallible = disp.fallible0.:(lr0) in
          let fallible' = IndexSet.diff fallible failing in
          if fallible != fallible' then (
            disp.fallible0.:(lr0) <- fallible';
            productive := true;
          )
        in
        check node;
        List.iter (fun edge -> check edge.source) edges;
        if !productive then
          Some (node, get_lr0_state grammar stacks gr.ker.:(node), edges, failing)
        else
          None
      )
    |> Seq.map (make_failing_sentence gr)
    |> (fun seq -> seq ())
  end;
  disp

(* Strategy for enumeration

   - Construct the graph using [get_node] to inject each entry point.
   - Use [cover_with_maximal_patterns] to produce suffixes starting from all
     entry points simultaneously and covering all lookaheads.

   This is sufficient for maximal patterns, but if we want exhaustive coverage,
   we need a second pass:

   - We take the entry nodes and the suffixes produced so far
   - We visit all the LR(0) states reachable from entry nodes to gather
     all the lookaheads with which they can be reached
   - We visit all suffixes to remove the lookaheads that are already covered
   - We do a BFS to collect suffixes reaching LR(0) states we still have to
     cover.
   - We do a BFS to collect prefixes reaching LR(0) states we still have to
     cover.
   - Then for each LR(0) state we still have to cover, pick enough prefixes and
     suffixes to cover everything, update remaining things to cover.


   For this we construct a BFS, reified as a tree, in which each branch commits
   to covering the yet-uncovered lookaheads.
   When outputting a sentence, we drop the non-productive prefix (one that does
   not cover anything new), then we update all other branches of the BFS to drop
   the already committed lookaheads.  Woooo...

   Then for printing, we group sentences by their final LR(0) state, which
   represents the right pattern.  (A hypothetical driver sketch follows this
   comment.)
*)
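
(* Possible end-to-end driver (sketch only; [report] is a hypothetical
   consumer of failing sentences, not part of the original module):

     let enumerate grammar rcs stacks entries =
       match make_graph grammar rcs stacks entries with
       | Graph gr ->
         (* First pass: one sentence per maximal pattern. *)
         List.iter report (cover_with_maximal_patterns grammar rcs stacks gr);
         (* Second pass: exhaustive coverage of the remaining lookaheads. *)
         Seq.iter report (to_seq (cover_all grammar rcs stacks gr))
*)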

(* Group sentences by patterns

   let order (lr0, _) (lr0', _) =
    match Index.compare lr0 lr0' with
    | 0 ->
      (* Same lr0 *)
      0
    | c0 ->
      (* Not the same:
         - Order first by decreasing number of items
         - If same number of items, fall back to an (arbitrary) total order
           induced by LR(0) state number *)
      match Int.compare
              (IndexSet.cardinal (Lr0.items gr.grammar lr0))
              (IndexSet.cardinal (Lr0.items gr.grammar lr0'))
      with
      | 0 -> c0
      | c -> c
     in
     group_by !by_lr0 ~compare:order
     ~group:(fun (lr0, node) rest -> (lr0, node :: List.map snd rest))

  List.iter begin fun (lr0, nodes) ->
    let sentences = extract_suffixes nodes in
    let lhs =
      match Lr0.incoming gr.grammar lr0 with
      | Some sym when Symbol.is_nonterminal gr.grammar sym ->
        "[_ "
      | Some _ | None ->
        "["
    in
    let pad = String.make (String.length lhs) ' ' in
    let lines =
      let items = IndexSet.elements (Lr0.items gr.grammar lr0) in
      List.mapi (fun i item ->
          let filter = Item.to_string gr.grammar item in
          if i = 0 then
            lhs ^ "/" ^ filter
          else
            pad ^ "/" ^ filter
        ) items
    in
    let lines = String.concat "\n" lines ^ "]" in
    print_endline lines;
    List.iter begin fun (node, edges, handled, failing) ->
      let suffix = List.fold_left
          (fun path edge -> edge.target.ker.lrc :: List.rev_append edge.path path)
          [node.ker.lrc] edges
      in
      let base = List.hd suffix in
      let complete = List.rev_append (prefix base) suffix in
      let sentence = List.map gr.stacks.label complete in
      let sentence = List.map (Lr1.to_string gr.grammar) sentence in
      print_endline (String.concat " " sentence);
      print_endline ("  for unique lookaheads: " ^ Terminal.lookaheads_to_string gr.grammar handled);
      let failing = IndexSet.diff failing handled in
      if IndexSet.is_not_empty failing then
        print_endline ("  for redundant lookaheads: " ^ Terminal.lookaheads_to_string gr.grammar failing);
    end sentences;
  end by_lr0
*)