/*
 * Copyright (c) 2016-2017, Intel Corporation
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *  * Neither the name of Intel Corporation nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/**
 * \file
 * \brief Rose build: code for analysing literal groups.
 */

#include "rose_build_groups.h"

#include "util/boundary_reports.h"
#include "util/compile_context.h"
#include "util/report_manager.h"

#include <queue>
#include <vector>

#include <boost/graph/topological_sort.hpp>
#include <boost/range/adaptor/map.hpp>
#include <boost/range/adaptor/reversed.hpp>

using namespace std;
using boost::adaptors::map_keys;

namespace ue2 {

#define ROSE_LONG_LITERAL_LEN 8
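/**
 * \brief True if the literal is long enough and shows some variation in its
 * final EXPECTED_FDR_BUCKET_LENGTH bytes.
 *
 * Literals whose tail is a single repeated byte are rejected; presumably
 * such repetitive tails match too frequently to make the literal a reliable
 * signal on its own.
 */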
static
bool superStrong(const rose_literal_id &lit) {
    if (lit.s.length() < ROSE_LONG_LITERAL_LEN) {
        return false;
    }

    const u32 EXPECTED_FDR_BUCKET_LENGTH = 8;

    assert(lit.s.length() >= EXPECTED_FDR_BUCKET_LENGTH);
    size_t len = lit.s.length();
    const string &s = lit.s.get_string();

    for (size_t i = 1; i < EXPECTED_FDR_BUCKET_LENGTH; i++) {
        if (s[len - 1 - i] != s[len - 1]) {
            return true; /* we have at least some variation in the tail */
        }
    }
    DEBUG_PRINTF("lit '%s' is not superstrong due to tail\n",
                 escapeString(s).c_str());
    return false;
}

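/**
 * \brief True if literal \a id (or one of its delayed children) is used by a
 * root successor vertex whose leftfix, if any, is unanchored: such a literal
 * must be looked for throughout the scan, so it belongs in the always-on
 * group.
 */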
static
bool eligibleForAlwaysOnGroup(const RoseBuildImpl &build, u32 id) {
    auto eligible = [&](RoseVertex v) {
        return build.isRootSuccessor(v)
               && (!build.g[v].left || !isAnchored(build.g[v].left));
    };

    if (any_of_in(build.literal_info[id].vertices, eligible)) {
        return true;
    }

    for (u32 delayed_id : build.literal_info[id].delayed_ids) {
        if (any_of_in(build.literal_info[delayed_id].vertices, eligible)) {
            return true;
        }
    }

    return false;
}

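/**
 * \brief True if literal \a lit still needs a group assigned: it must not be
 * delayed (the shadow's master is checked instead), must not live in the
 * anchored or event tables, must not already have a group, and must be used
 * by at least one vertex or delayed child.
 */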
static
bool requires_group_assignment(const rose_literal_id &lit,
                               const rose_literal_info &info) {
    if (lit.delay) { /* we will check the shadow's master */
        return false;
    }

    if (lit.table == ROSE_ANCHORED || lit.table == ROSE_EVENT) {
        return false;
    }

    // If we already have a group applied, skip.
    if (info.group_mask) {
        return false;
    }

    if (info.vertices.empty() && info.delayed_ids.empty()) {
        DEBUG_PRINTF("literal is good for nothing\n");
        return false;
    }

    return true;
}

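/**
 * \brief Union of the group masks of the "sibling" literals of \a v, i.e.
 * literals on vertices that share a predecessor with \a v. In the small
 * literal count case, siblings must also share \a v's leftfix so that rose
 * squashing remains valid.
 */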
static
rose_group calcLocalGroup(const RoseVertex v, const RoseGraph &g,
                          const deque<rose_literal_info> &literal_info,
                          const bool small_literal_count) {
    rose_group local_group = 0;

    for (auto u : inv_adjacent_vertices_range(v, g)) {
        /* In small cases, ensure that siblings have the same rose parentage to
         * allow rose squashing. In larger cases, don't do this as groups are
         * probably too scarce. */
        for (auto w : adjacent_vertices_range(u, g)) {
            if (!small_literal_count || g[v].left == g[w].left) {
                for (u32 lit_id : g[w].literals) {
                    local_group |= literal_info[lit_id].group_mask;
                }
            } else {
                DEBUG_PRINTF("not sibling different mother %zu %zu\n",
                             g[v].index, g[w].index);
            }
        }
    }

    return local_group;
}

/* group constants */
#define MAX_LIGHT_LITERAL_CASE 200 /* allow rose to affect group decisions below
                                    * this */

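/**
 * \brief All vertices using literal \a id, including those using its delayed
 * variants.
 */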
static
flat_set<RoseVertex> getAssociatedVertices(const RoseBuildImpl &build, u32 id) {
    flat_set<RoseVertex> out;
    const auto &info = build.literal_info[id];
    insert(&out, info.vertices);
    for (const auto &delayed : info.delayed_ids) {
        insert(&out, build.literal_info[delayed].vertices);
    }
    return out;
}

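/**
 * \brief Advance the round-robin group counter, wrapping back to
 * \a min_start_group once ROSE_GROUPS_MAX is reached so that the reserved
 * low groups are never handed out again.
 */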
static
u32 next_available_group(u32 counter, u32 min_start_group) {
    counter++;
    if (counter == ROSE_GROUPS_MAX) {
        DEBUG_PRINTF("resetting groups\n");
        counter = min_start_group;
    }

    return counter;
}

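/**
 * \brief In streaming mode, if there are boundary reports at EOD, reserve
 * the always-on group as the boundary group so that group-based stream
 * exhaustion cannot discard those reports.
 */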
static
void allocateGroupForBoundary(RoseBuildImpl &build, u32 group_always_on,
                              map<u8, u32> &groupCount) {
    /* Boundary reports at zero will always be fired and forgotten; no need to
     * worry about preventing the stream from being marked as exhausted. */
    if (build.boundary.report_at_eod.empty()) {
        return;
    }

    /* Group based stream exhaustion is only done at stream boundaries */
    if (!build.cc.streaming) {
        return;
    }

    DEBUG_PRINTF("allocating %u as boundary group id\n", group_always_on);

    build.boundary_group_mask = 1ULL << group_always_on;
    groupCount[group_always_on]++;
}

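/**
 * \brief In streaming mode, assign a group to the EOD event literal, if one
 * exists. It shares the always-on group when it can; otherwise it is given a
 * fresh group from \a counter.
 */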
static
void allocateGroupForEvent(RoseBuildImpl &build, u32 group_always_on,
                           map<u8, u32> &groupCount, u32 *counter) {
    if (build.eod_event_literal_id == MO_INVALID_IDX) {
        return;
    }

    /* Group based stream exhaustion is only done at stream boundaries */
    if (!build.cc.streaming) {
        return;
    }

    rose_literal_info &info = build.literal_info[build.eod_event_literal_id];

    if (info.vertices.empty()) {
        return;
    }

    bool new_group = !groupCount[group_always_on];
    for (RoseVertex v : info.vertices) {
        if (build.g[v].left && !isAnchored(build.g[v].left)) {
            new_group = false;
        }
    }

    u32 group;
    if (!new_group) {
        group = group_always_on;
    } else {
        group = *counter;
        *counter += 1;
    }

    DEBUG_PRINTF("allocating %u as eod event group id\n", group);
    info.group_mask = 1ULL << group;
    groupCount[group]++;
}

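/**
 * \brief Assign a group to each literal that needs one.
 *
 * Assignment proceeds in passes:
 *  1. Literals with an unanchored root role go into the always-on group.
 *  2. The boundary group and EOD event group are reserved if needed.
 *  3. Remaining literals, most heavily used first, try to share a group with
 *     their siblings so the group can be squashed as a unit; failing that,
 *     they are assigned round-robin, with "superstrong" literals deferred
 *     and spread over any leftover groups.
 *  4. Delayed literals inherit the group of their undelayed master.
 *
 * Each literal's group_mask ends up with a single bit set, e.g. group 3
 * becomes mask 0x8 (1ULL << 3).
 */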
void assignGroupsToLiterals(RoseBuildImpl &build) {
    auto &literals = build.literals;
    auto &literal_info = build.literal_info;

    bool small_literal_count = literal_info.size() <= MAX_LIGHT_LITERAL_CASE;

    map<u8, u32> groupCount; /* group index to number of members */

    u32 counter = 0;
    u32 group_always_on = 0;

    // First pass: handle always on literals.
    for (u32 id = 0; id < literals.size(); id++) {
        const rose_literal_id &lit = literals.at(id);
        rose_literal_info &info = literal_info[id];

        if (!requires_group_assignment(lit, info)) {
            continue;
        }

        // If this literal has a root role, we always have to search for it
        // anyway, so it goes in the always-on group.
        /* We could end up squashing it if it is followed by a .* */
        if (eligibleForAlwaysOnGroup(build, id)) {
            info.group_mask = 1ULL << group_always_on;
            groupCount[group_always_on]++;
            continue;
        }
    }

    u32 group_long_lit;
    if (groupCount[group_always_on]) {
        DEBUG_PRINTF("%u always on literals\n", groupCount[group_always_on]);
        group_long_lit = group_always_on;
        counter++;
    } else {
        group_long_lit = counter;
        counter++;
    }

    allocateGroupForBoundary(build, group_always_on, groupCount);
    allocateGroupForEvent(build, group_always_on, groupCount, &counter);

    u32 min_start_group = counter;
    priority_queue<tuple<s32, s32, u32>> pq;

    // Second pass: the other literals.
    for (u32 id = 0; id < literals.size(); id++) {
        const rose_literal_id &lit = literals.at(id);
        rose_literal_info &info = literal_info[id];

        if (!requires_group_assignment(lit, info)) {
            continue;
        }

        assert(!eligibleForAlwaysOnGroup(build, id));
        pq.emplace(-(s32)info.vertices.size(), -(s32)lit.s.length(), id);
    }
    vector<u32> long_lits;
    while (!pq.empty()) {
        u32 id = get<2>(pq.top());
        pq.pop();
        UNUSED const rose_literal_id &lit = literals.at(id);
        DEBUG_PRINTF("assigning groups to lit %u (v %zu l %zu)\n", id,
                     literal_info[id].vertices.size(), lit.s.length());

        u8 group_id = 0;
        rose_group group = ~0ULL;
        for (auto v : getAssociatedVertices(build, id)) {
            rose_group local_group = calcLocalGroup(v, build.g, literal_info,
                                                    small_literal_count);
            group &= local_group;
            if (!group) {
                break;
            }
        }

        if (group == ~0ULL) {
            goto boring;
        }

        group &= ~((1ULL << min_start_group) - 1); /* ensure the purity of the
                                                    * always_on groups */
        if (!group) {
            goto boring;
        }

        group_id = ctz64(group);

        /* TODO: fairness */
        DEBUG_PRINTF("picking sibling group %hhu\n", group_id);
        literal_info[id].group_mask = 1ULL << group_id;
        groupCount[group_id]++;

        continue;

    boring:
        /* long literals will either be stuck in a mega group or spread around
         * depending on availability */
        if (superStrong(lit)) {
            long_lits.push_back(id);
            continue;
        }

        // Other literals are assigned to our remaining groups round-robin.
        group_id = counter;

        DEBUG_PRINTF("picking boring group %hhu\n", group_id);
        literal_info[id].group_mask = 1ULL << group_id;
        groupCount[group_id]++;
        counter = next_available_group(counter, min_start_group);
    }

    /* spread long literals out amongst unused groups if any, otherwise stick
     * them in the always-on group */

    if (groupCount[counter]) {
        DEBUG_PRINTF("sticking long literals in the image of the always on\n");
        for (u32 lit_id : long_lits) {
            literal_info[lit_id].group_mask = 1ULL << group_long_lit;
            groupCount[group_long_lit]++;
        }
    } else {
        u32 min_long_counter = counter;
        DEBUG_PRINTF("base long lit group = %u\n", min_long_counter);
        for (u32 lit_id : long_lits) {
            u8 group_id = counter;
            literal_info[lit_id].group_mask = 1ULL << group_id;
            groupCount[group_id]++;
            counter = next_available_group(counter, min_long_counter);
        }
    }
    /* assign delayed literals to the same group as their parent */
    for (u32 id = 0; id < literals.size(); id++) {
        const rose_literal_id &lit = literals.at(id);

        if (!lit.delay) {
            continue;
        }

        u32 parent = literal_info[id].undelayed_id;
        DEBUG_PRINTF("%u is shadow picking up groups from %u\n", id, parent);
        assert(literal_info[parent].undelayed_id == parent);
        assert(literal_info[parent].group_mask);
        literal_info[id].group_mask = literal_info[parent].group_mask;
        /* don't increment the group count - these don't really exist */
    }

    DEBUG_PRINTF("populate group to literal mapping\n");
    for (u32 id = 0; id < literals.size(); id++) {
        rose_group groups = literal_info[id].group_mask;
        while (groups) {
            u32 group_id = findAndClearLSB_64(&groups);
            build.group_to_literal[group_id].insert(id);
        }
    }

    /* find how many groups we allocated */
    for (u32 i = 0; i < ROSE_GROUPS_MAX; i++) {
        if (groupCount[i]) {
            build.group_end = max(build.group_end, i + 1);
        }
    }
}

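/**
 * \brief Get the union of the group masks of all literals on vertex \a v
 * (delayed literals are resolved to their undelayed master).
 */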
rose_group RoseBuildImpl::getGroups(RoseVertex v) const {
    rose_group groups = 0;

    for (u32 id : g[v].literals) {
        u32 lit_id = literal_info.at(id).undelayed_id;

        rose_group mygroups = literal_info[lit_id].group_mask;
        groups |= mygroups;
    }

    return groups;
}

/** \brief Get the groups of the successor literals of a given vertex. */
rose_group RoseBuildImpl::getSuccGroups(RoseVertex start) const {
    rose_group initialGroups = 0;

    for (auto v : adjacent_vertices_range(start, g)) {
        initialGroups |= getGroups(v);
    }

    return initialGroups;
}

/**
 * The groups that a role sets are determined by the union of its successor
 * literals. Requires that groups have already been assigned to the literals.
 */
void assignGroupsToRoles(RoseBuildImpl &build) {
    auto &g = build.g;

    /* Note: if there is a successor literal in the sidematcher, the literals
     * of its successors must be added instead */
    for (auto v : vertices_range(g)) {
        if (build.isAnyStart(v)) {
            continue;
        }

        const rose_group succ_groups = build.getSuccGroups(v);
        g[v].groups |= succ_groups;

        auto ghost_it = build.ghost.find(v);
        if (ghost_it != end(build.ghost)) {
            /* delayed roles need to supply their groups to the ghost role */
            g[ghost_it->second].groups |= succ_groups;
        }

        DEBUG_PRINTF("vertex %zu: groups=%llx\n", g[v].index, g[v].groups);
    }
}

/**
 * \brief Returns a mapping from each graph vertex v to the intersection of
 * the groups switched on by all of the paths leading up to (and including) v
 * from the start vertices.
 */
unordered_map<RoseVertex, rose_group>
getVertexGroupMap(const RoseBuildImpl &build) {
    const RoseGraph &g = build.g;
    vector<RoseVertex> v_order;
    v_order.reserve(num_vertices(g));

    boost::topological_sort(g, back_inserter(v_order));

    unordered_map<RoseVertex, rose_group> vertex_group_map;
    vertex_group_map.reserve(num_vertices(g));

    const rose_group initial_groups = build.getInitialGroups();

    for (const auto &v : boost::adaptors::reverse(v_order)) {
        DEBUG_PRINTF("vertex %zu\n", g[v].index);

        if (build.isAnyStart(v)) {
            DEBUG_PRINTF("start vertex, groups=0x%llx\n", initial_groups);
            vertex_group_map.emplace(v, initial_groups);
            continue;
        }

        // To get to this vertex, we must have come through a predecessor, and
        // everyone who isn't a start vertex has one.
        assert(in_degree(v, g) > 0);
        rose_group pred_groups = ~rose_group{0};
        for (auto u : inv_adjacent_vertices_range(v, g)) {
            DEBUG_PRINTF("pred %zu\n", g[u].index);
            assert(contains(vertex_group_map, u));
            pred_groups &= vertex_group_map.at(u);
        }

        DEBUG_PRINTF("pred_groups=0x%llx\n", pred_groups);
        DEBUG_PRINTF("g[v].groups=0x%llx\n", g[v].groups);

        rose_group v_groups = pred_groups | g[v].groups;
        DEBUG_PRINTF("v_groups=0x%llx\n", v_groups);

        vertex_group_map.emplace(v, v_groups);
    }

    return vertex_group_map;
}

/**
 * \brief Find the set of groups that can be squashed anywhere in the graph,
 * either by a literal or by a leftfix.
 */
rose_group getSquashableGroups(const RoseBuildImpl &build) {
    rose_group squashable_groups = 0;
    for (const auto &info : build.literal_info) {
        if (info.squash_group) {
            DEBUG_PRINTF("lit squash mask 0x%llx\n", info.group_mask);
            squashable_groups |= info.group_mask;
        }
    }
    for (const auto &m : build.rose_squash_masks) {
        DEBUG_PRINTF("left squash mask 0x%llx\n", ~m.second);
        squashable_groups |= ~m.second;
    }

    DEBUG_PRINTF("squashable groups=0x%llx\n", squashable_groups);
    assert(!(squashable_groups & build.boundary_group_mask));
    return squashable_groups;
}

/**
 * \brief True if every vertex associated with a group also belongs to
 * lit_info.
 */
static
bool coversGroup(const RoseBuildImpl &build,
                 const rose_literal_info &lit_info) {
    if (lit_info.vertices.empty()) {
        DEBUG_PRINTF("no vertices - does not cover\n");
        return false;
    }

    if (!lit_info.group_mask) {
        DEBUG_PRINTF("no group - does not cover\n");
        return false; /* no group (not a floating lit?) */
    }

    assert(popcount64(lit_info.group_mask) == 1);

    /* for each lit in group, ensure that vertices are a subset of lit_info's */
    rose_group groups = lit_info.group_mask;
    while (groups) {
        u32 group_id = findAndClearLSB_64(&groups);
        for (u32 id : build.group_to_literal.at(group_id)) {
            DEBUG_PRINTF(" checking against friend %u\n", id);
            if (!is_subset_of(build.literal_info[id].vertices,
                              lit_info.vertices)) {
                DEBUG_PRINTF("fail\n");
                return false;
            }
        }
    }

    DEBUG_PRINTF("ok\n");
    return true;
}

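/**
 * \brief True if matching literal \a id allows its group to be switched off.
 *
 * A literal is a group squasher if, once it has matched, later matches of
 * the literals sharing its group can add nothing new, so the group can be
 * cleared to skip further confirm work. The single-vertex and multi-vertex
 * cases below apply different checks.
 */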
static
bool isGroupSquasher(const RoseBuildImpl &build, const u32 id /* literal id */,
                     rose_group forbidden_squash_group) {
    const RoseGraph &g = build.g;

    const rose_literal_info &lit_info = build.literal_info.at(id);

    DEBUG_PRINTF("checking if %u '%s' is a group squasher %016llx\n", id,
                 dumpString(build.literals.at(id).s).c_str(),
                 lit_info.group_mask);

    if (build.literals.at(id).table == ROSE_EVENT) {
        DEBUG_PRINTF("event literal\n");
        return false;
    }

    if (!coversGroup(build, lit_info)) {
        DEBUG_PRINTF("does not cover group\n");
        return false;
    }

    if (lit_info.group_mask & forbidden_squash_group) {
        /* probably a delayed lit */
        DEBUG_PRINTF("skipping as involves a forbidden group\n");
        return false;
    }

    // Single-vertex, less constrained case than the multiple-vertex one below.
    if (lit_info.vertices.size() == 1) {
        const RoseVertex &v = *lit_info.vertices.begin();

        if (build.hasDelayPred(v)) { /* due to rebuild issues */
            return false;
        }

        /* there are two ways to be a group squasher:
         * 1) only care about the first accepted match
         * 2) can only match once after a pred match
         *
         * (2) requires analysis of the infix before v and is not implemented,
         * TODO
         */

        /* Case 1 */

        // Can't squash cases with accepts unless they are all
        // simple-exhaustible.
        if (any_of_in(g[v].reports, [&](ReportID report) {
                return !isSimpleExhaustible(build.rm.getReport(report));
            })) {
            DEBUG_PRINTF("can't squash reporter\n");
            return false;
        }

        /* Can't squash cases with a suffix without analysis of the suffix.
         * TODO: look at suffixes */
        if (g[v].suffix) {
            return false;
        }

        // Out-edges must have inf max bound, and no other shenanigans.
        for (const auto &e : out_edges_range(v, g)) {
            if (g[e].maxBound != ROSE_BOUND_INF) {
                return false;
            }

            if (g[target(e, g)].left) {
                return false; /* is an infix rose trigger, TODO: analysis */
            }
        }

        DEBUG_PRINTF("%u is a path 1 group squasher\n", id);
        return true;

        /* note: we could also squash the groups of its preds (if nobody else
         * is using them). TODO. */
    }

    // Multiple-vertex case
    for (auto v : lit_info.vertices) {
        assert(!build.isAnyStart(v));

        // Can't squash cases with accepts
        if (!g[v].reports.empty()) {
            return false;
        }

        // Suffixes and leftfixes are out too as first literal may not match
        // for everyone.
        if (!g[v].isBoring()) {
            return false;
        }

        /* TODO: checks are solid but we should explain */
        if (build.hasDelayPred(v) || build.hasAnchoredTablePred(v)) {
            return false;
        }

        // Out-edges must have inf max bound and not directly lead to another
        // vertex with this group, e.g. 'foobar.*foobar'.
        for (const auto &e : out_edges_range(v, g)) {
            if (g[e].maxBound != ROSE_BOUND_INF) {
                return false;
            }
            RoseVertex t = target(e, g);

            if (g[t].left) {
                return false; /* is an infix rose trigger */
            }

            for (u32 lit_id : g[t].literals) {
                if (build.literal_info[lit_id].group_mask &
                    lit_info.group_mask) {
                    return false;
                }
            }
        }

        // In-edges must all be dot-stars with no overlap at all, as overlap
        // also causes history to be used.
        /* Different tables are already forbidden by previous checks */
        for (const auto &e : in_edges_range(v, g)) {
            if (!(g[e].minBound == 0 && g[e].maxBound == ROSE_BOUND_INF)) {
                return false;
            }

            // Check overlap, if source was a literal.
            RoseVertex u = source(e, g);
            if (build.maxLiteralOverlap(u, v)) {
                return false;
            }
        }
    }

    DEBUG_PRINTF("literal %u is a multi-vertex group squasher\n", id);
    return true;
}

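/**
 * \brief Mark as squashers all literals that can switch their group off,
 * forbidding the boundary group and any group containing a delayed literal.
 */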
void findGroupSquashers(RoseBuildImpl &build) {
    rose_group forbidden_squash_group = build.boundary_group_mask;
    for (u32 id = 0; id < build.literals.size(); id++) {
        const auto &lit = build.literals.at(id);
        if (lit.delay) {
            forbidden_squash_group |= build.literal_info[id].group_mask;
        }
    }

    for (u32 id = 0; id < build.literal_info.size(); id++) {
        if (isGroupSquasher(build, id, forbidden_squash_group)) {
            build.literal_info[id].squash_group = true;
        }
    }
}

} // namespace ue2