1 | /* |
2 | * Copyright © 2007,2008,2009,2010 Red Hat, Inc. |
3 | * Copyright © 2010,2012 Google, Inc. |
4 | * |
5 | * This is part of HarfBuzz, a text shaping library. |
6 | * |
7 | * Permission is hereby granted, without written agreement and without |
8 | * license or royalty fees, to use, copy, modify, and distribute this |
9 | * software and its documentation for any purpose, provided that the |
10 | * above copyright notice and the following two paragraphs appear in |
11 | * all copies of this software. |
12 | * |
13 | * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
14 | * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
15 | * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
16 | * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
17 | * DAMAGE. |
18 | * |
19 | * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
20 | * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
21 | * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
22 | * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
23 | * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
24 | * |
25 | * Red Hat Author(s): Behdad Esfahbod |
26 | * Google Author(s): Behdad Esfahbod |
27 | */ |
28 | |
29 | #ifndef HB_OT_LAYOUT_GSUBGPOS_HH |
30 | #define HB_OT_LAYOUT_GSUBGPOS_HH |
31 | |
32 | #include "hb.hh" |
33 | #include "hb-buffer.hh" |
34 | #include "hb-map.hh" |
35 | #include "hb-set.hh" |
36 | #include "hb-ot-map.hh" |
37 | #include "hb-ot-layout-common.hh" |
38 | #include "hb-ot-layout-gdef-table.hh" |
39 | |
40 | |
41 | namespace OT { |
42 | |
43 | |
44 | struct hb_intersects_context_t : |
45 | hb_dispatch_context_t<hb_intersects_context_t, bool> |
46 | { |
47 | template <typename T> |
48 | return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); } |
49 | static return_t default_return_value () { return false; } |
50 | bool stop_sublookup_iteration (return_t r) const { return r; } |
51 | |
52 | const hb_set_t *glyphs; |
53 | |
54 | hb_intersects_context_t (const hb_set_t *glyphs_) : |
55 | glyphs (glyphs_) {} |
56 | }; |
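/* A hypothetical usage sketch (the real drivers live elsewhere in
 * hb-ot-layout): a dispatch context like the above is handed to a lookup,
 * which forwards it to each of its subtables until
 * stop_sublookup_iteration() says to stop.  Roughly:
 *
 *   hb_intersects_context_t c (&glyph_set);
 *   bool intersects = lookup.dispatch (&c);
 */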
57 | |
58 | struct hb_have_non_1to1_context_t : |
59 | hb_dispatch_context_t<hb_have_non_1to1_context_t, bool> |
60 | { |
61 | template <typename T> |
62 | return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); } |
63 | static return_t default_return_value () { return false; } |
64 | bool stop_sublookup_iteration (return_t r) const { return r; } |
65 | }; |
66 | |
67 | struct hb_closure_context_t : |
68 | hb_dispatch_context_t<hb_closure_context_t> |
69 | { |
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indices, unsigned seq_index, unsigned end_index);
71 | template <typename T> |
72 | return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); } |
73 | static return_t default_return_value () { return hb_empty_t (); } |
  void recurse (unsigned lookup_index, hb_set_t *covered_seq_indices, unsigned seq_index, unsigned end_index)
75 | { |
76 | if (unlikely (nesting_level_left == 0 || !recurse_func)) |
77 | return; |
78 | |
79 | nesting_level_left--; |
    recurse_func (this, lookup_index, covered_seq_indices, seq_index, end_index);
81 | nesting_level_left++; |
82 | } |
83 | |
84 | void reset_lookup_visit_count () |
85 | { lookup_count = 0; } |
86 | |
87 | bool lookup_limit_exceeded () |
88 | { return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; } |
89 | |
90 | bool should_visit_lookup (unsigned int lookup_index) |
91 | { |
92 | if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT) |
93 | return false; |
94 | |
95 | if (is_lookup_done (lookup_index)) |
96 | return false; |
97 | |
98 | return true; |
99 | } |
100 | |
101 | bool is_lookup_done (unsigned int lookup_index) |
102 | { |
103 | if (unlikely (done_lookups_glyph_count->in_error () || |
104 | done_lookups_glyph_set->in_error ())) |
105 | return true; |
106 | |
107 | /* Have we visited this lookup with the current set of glyphs? */ |
108 | if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ()) |
109 | { |
110 | done_lookups_glyph_count->set (lookup_index, glyphs->get_population ()); |
111 | |
112 | if (!done_lookups_glyph_set->has (lookup_index)) |
113 | { |
114 | if (unlikely (!done_lookups_glyph_set->set (lookup_index, hb::unique_ptr<hb_set_t> {hb_set_create ()}))) |
115 | return true; |
116 | } |
117 | |
118 | done_lookups_glyph_set->get (lookup_index)->clear (); |
119 | } |
120 | |
121 | hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index); |
122 | if (unlikely (covered_glyph_set->in_error ())) |
123 | return true; |
124 | if (parent_active_glyphs ().is_subset (*covered_glyph_set)) |
125 | return true; |
126 | |
127 | covered_glyph_set->union_ (parent_active_glyphs ()); |
128 | return false; |
129 | } |
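
  /* Note on the memoization above: done_lookups_glyph_count records the
   * glyph-set population at the last visit of each lookup, so a lookup is
   * re-closed whenever the overall closure set has grown, while
   * done_lookups_glyph_set accumulates the parent-active glyphs already
   * processed, letting a revisit with a subset of those glyphs be skipped
   * entirely. */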
130 | |
131 | const hb_set_t& previous_parent_active_glyphs () { |
132 | if (active_glyphs_stack.length <= 1) |
133 | return *glyphs; |
134 | |
135 | return active_glyphs_stack[active_glyphs_stack.length - 2]; |
136 | } |
137 | |
138 | const hb_set_t& parent_active_glyphs () |
139 | { |
140 | if (!active_glyphs_stack) |
141 | return *glyphs; |
142 | |
143 | return active_glyphs_stack.tail (); |
144 | } |
145 | |
146 | hb_set_t* push_cur_active_glyphs () |
147 | { |
148 | hb_set_t *s = active_glyphs_stack.push (); |
149 | if (unlikely (active_glyphs_stack.in_error ())) |
150 | return nullptr; |
151 | return s; |
152 | } |
153 | |
154 | bool pop_cur_done_glyphs () |
155 | { |
156 | if (!active_glyphs_stack) |
157 | return false; |
158 | |
159 | active_glyphs_stack.pop (); |
160 | return true; |
161 | } |
162 | |
163 | hb_face_t *face; |
164 | hb_set_t *glyphs; |
165 | hb_set_t output[1]; |
166 | hb_vector_t<hb_set_t> active_glyphs_stack; |
167 | recurse_func_t recurse_func = nullptr; |
168 | unsigned int nesting_level_left; |
169 | |
170 | hb_closure_context_t (hb_face_t *face_, |
171 | hb_set_t *glyphs_, |
172 | hb_map_t *done_lookups_glyph_count_, |
173 | hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set_, |
174 | unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) : |
175 | face (face_), |
176 | glyphs (glyphs_), |
177 | nesting_level_left (nesting_level_left_), |
178 | done_lookups_glyph_count (done_lookups_glyph_count_), |
179 | done_lookups_glyph_set (done_lookups_glyph_set_) |
180 | {} |
181 | |
182 | ~hb_closure_context_t () { flush (); } |
183 | |
184 | void set_recurse_func (recurse_func_t func) { recurse_func = func; } |
185 | |
186 | void flush () |
187 | { |
188 | output->del_range (face->get_num_glyphs (), HB_SET_VALUE_INVALID); /* Remove invalid glyphs. */ |
189 | glyphs->union_ (*output); |
190 | output->clear (); |
191 | active_glyphs_stack.pop (); |
192 | active_glyphs_stack.reset (); |
193 | } |
194 | |
195 | private: |
196 | hb_map_t *done_lookups_glyph_count; |
197 | hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set; |
198 | unsigned int lookup_count = 0; |
199 | }; |
200 | |
201 | |
202 | |
203 | struct hb_closure_lookups_context_t : |
204 | hb_dispatch_context_t<hb_closure_lookups_context_t> |
205 | { |
206 | typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index); |
207 | template <typename T> |
208 | return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); } |
209 | static return_t default_return_value () { return hb_empty_t (); } |
210 | void recurse (unsigned lookup_index) |
211 | { |
212 | if (unlikely (nesting_level_left == 0 || !recurse_func)) |
213 | return; |
214 | |
    /* Return if we have hit the lookup limit, or if this lookup has
     * already been recursed into. */
216 | if (lookup_limit_exceeded () |
217 | || visited_lookups->in_error () |
218 | || visited_lookups->has (lookup_index)) |
      // Don't increment the lookup count here; that will be done in the call
      // to closure_lookups() made by recurse_func.
221 | return; |
222 | |
223 | nesting_level_left--; |
224 | recurse_func (this, lookup_index); |
225 | nesting_level_left++; |
226 | } |
227 | |
228 | void set_lookup_visited (unsigned lookup_index) |
229 | { visited_lookups->add (lookup_index); } |
230 | |
231 | void set_lookup_inactive (unsigned lookup_index) |
232 | { inactive_lookups->add (lookup_index); } |
233 | |
234 | bool lookup_limit_exceeded () |
235 | { |
236 | bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; |
237 | if (ret) |
      DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
    return ret;
  }
240 | |
241 | bool is_lookup_visited (unsigned lookup_index) |
242 | { |
243 | if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)) |
244 | { |
      DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
246 | lookup_count, lookup_index); |
247 | return true; |
248 | } |
249 | |
250 | if (unlikely (visited_lookups->in_error ())) |
251 | return true; |
252 | |
253 | return visited_lookups->has (lookup_index); |
254 | } |
255 | |
256 | hb_face_t *face; |
257 | const hb_set_t *glyphs; |
258 | recurse_func_t recurse_func; |
259 | unsigned int nesting_level_left; |
260 | |
261 | hb_closure_lookups_context_t (hb_face_t *face_, |
262 | const hb_set_t *glyphs_, |
263 | hb_set_t *visited_lookups_, |
264 | hb_set_t *inactive_lookups_, |
265 | unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) : |
266 | face (face_), |
267 | glyphs (glyphs_), |
268 | recurse_func (nullptr), |
269 | nesting_level_left (nesting_level_left_), |
270 | visited_lookups (visited_lookups_), |
271 | inactive_lookups (inactive_lookups_), |
272 | lookup_count (0) {} |
273 | |
274 | void set_recurse_func (recurse_func_t func) { recurse_func = func; } |
275 | |
276 | private: |
277 | hb_set_t *visited_lookups; |
278 | hb_set_t *inactive_lookups; |
279 | unsigned int lookup_count; |
280 | }; |
281 | |
282 | struct hb_would_apply_context_t : |
283 | hb_dispatch_context_t<hb_would_apply_context_t, bool> |
284 | { |
285 | template <typename T> |
286 | return_t dispatch (const T &obj) { return obj.would_apply (this); } |
287 | static return_t default_return_value () { return false; } |
288 | bool stop_sublookup_iteration (return_t r) const { return r; } |
289 | |
290 | hb_face_t *face; |
291 | const hb_codepoint_t *glyphs; |
292 | unsigned int len; |
293 | bool zero_context; |
294 | |
295 | hb_would_apply_context_t (hb_face_t *face_, |
296 | const hb_codepoint_t *glyphs_, |
297 | unsigned int len_, |
298 | bool zero_context_) : |
299 | face (face_), |
300 | glyphs (glyphs_), |
301 | len (len_), |
302 | zero_context (zero_context_) {} |
303 | }; |
304 | |
305 | struct hb_collect_glyphs_context_t : |
306 | hb_dispatch_context_t<hb_collect_glyphs_context_t> |
307 | { |
308 | typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index); |
309 | template <typename T> |
310 | return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); } |
311 | static return_t default_return_value () { return hb_empty_t (); } |
312 | void recurse (unsigned int lookup_index) |
313 | { |
314 | if (unlikely (nesting_level_left == 0 || !recurse_func)) |
315 | return; |
316 | |
317 | /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get |
318 | * past the previous check. For GSUB, we only want to collect the output |
319 | * glyphs in the recursion. If output is not requested, we can go home now. |
320 | * |
     * Note further that the above is not exactly correct. A recursed lookup
322 | * is allowed to match input that is not matched in the context, but that's |
323 | * not how most fonts are built. It's possible to relax that and recurse |
324 | * with all sets here if it proves to be an issue. |
325 | */ |
326 | |
327 | if (output == hb_set_get_empty ()) |
328 | return; |
329 | |
    /* Return if this lookup has already been recursed into. */
331 | if (recursed_lookups->has (lookup_index)) |
332 | return; |
333 | |
334 | hb_set_t *old_before = before; |
335 | hb_set_t *old_input = input; |
336 | hb_set_t *old_after = after; |
337 | before = input = after = hb_set_get_empty (); |
338 | |
339 | nesting_level_left--; |
340 | recurse_func (this, lookup_index); |
341 | nesting_level_left++; |
342 | |
343 | before = old_before; |
344 | input = old_input; |
345 | after = old_after; |
346 | |
347 | recursed_lookups->add (lookup_index); |
348 | } |
349 | |
350 | hb_face_t *face; |
351 | hb_set_t *before; |
352 | hb_set_t *input; |
353 | hb_set_t *after; |
354 | hb_set_t *output; |
355 | recurse_func_t recurse_func; |
356 | hb_set_t *recursed_lookups; |
357 | unsigned int nesting_level_left; |
358 | |
359 | hb_collect_glyphs_context_t (hb_face_t *face_, |
360 | hb_set_t *glyphs_before, /* OUT. May be NULL */ |
361 | hb_set_t *glyphs_input, /* OUT. May be NULL */ |
362 | hb_set_t *glyphs_after, /* OUT. May be NULL */ |
363 | hb_set_t *glyphs_output, /* OUT. May be NULL */ |
364 | unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) : |
365 | face (face_), |
366 | before (glyphs_before ? glyphs_before : hb_set_get_empty ()), |
367 | input (glyphs_input ? glyphs_input : hb_set_get_empty ()), |
368 | after (glyphs_after ? glyphs_after : hb_set_get_empty ()), |
369 | output (glyphs_output ? glyphs_output : hb_set_get_empty ()), |
370 | recurse_func (nullptr), |
371 | recursed_lookups (hb_set_create ()), |
372 | nesting_level_left (nesting_level_left_) {} |
373 | ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); } |
374 | |
375 | void set_recurse_func (recurse_func_t func) { recurse_func = func; } |
376 | }; |
377 | |
378 | |
379 | |
380 | template <typename set_t> |
381 | struct hb_collect_coverage_context_t : |
382 | hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &> |
383 | { |
  typedef const Coverage &return_t; // Annoying that we have to duplicate this here.
385 | template <typename T> |
386 | return_t dispatch (const T &obj) { return obj.get_coverage (); } |
387 | static return_t default_return_value () { return Null (Coverage); } |
388 | bool stop_sublookup_iteration (return_t r) const |
389 | { |
390 | r.collect_coverage (set); |
391 | return false; |
392 | } |
393 | |
394 | hb_collect_coverage_context_t (set_t *set_) : |
395 | set (set_) {} |
396 | |
397 | set_t *set; |
398 | }; |
399 | |
400 | struct hb_ot_apply_context_t : |
401 | hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY> |
402 | { |
403 | struct matcher_t |
404 | { |
405 | typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data); |
406 | |
407 | void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; } |
408 | void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; } |
409 | void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; } |
410 | void set_mask (hb_mask_t mask_) { mask = mask_; } |
411 | void set_per_syllable (bool per_syllable_) { per_syllable = per_syllable_; } |
412 | void set_syllable (uint8_t syllable_) { syllable = per_syllable ? syllable_ : 0; } |
413 | void set_match_func (match_func_t match_func_, |
414 | const void *match_data_) |
415 | { match_func = match_func_; match_data = match_data_; } |
416 | |
417 | enum may_match_t { |
418 | MATCH_NO, |
419 | MATCH_YES, |
420 | MATCH_MAYBE |
421 | }; |
422 | |
423 | #ifndef HB_OPTIMIZE_SIZE |
424 | HB_ALWAYS_INLINE |
425 | #endif |
426 | may_match_t may_match (hb_glyph_info_t &info, |
427 | hb_codepoint_t glyph_data) const |
428 | { |
429 | if (!(info.mask & mask) || |
430 | (syllable && syllable != info.syllable ())) |
431 | return MATCH_NO; |
432 | |
433 | if (match_func) |
434 | return match_func (info, glyph_data, match_data) ? MATCH_YES : MATCH_NO; |
435 | |
436 | return MATCH_MAYBE; |
437 | } |
438 | |
439 | enum may_skip_t { |
440 | SKIP_NO, |
441 | SKIP_YES, |
442 | SKIP_MAYBE |
443 | }; |
444 | |
445 | #ifndef HB_OPTIMIZE_SIZE |
446 | HB_ALWAYS_INLINE |
447 | #endif |
448 | may_skip_t may_skip (const hb_ot_apply_context_t *c, |
449 | const hb_glyph_info_t &info) const |
450 | { |
451 | if (!c->check_glyph_property (&info, lookup_props)) |
452 | return SKIP_YES; |
453 | |
454 | if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) && |
455 | (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) && |
456 | (ignore_zwj || !_hb_glyph_info_is_zwj (&info)))) |
457 | return SKIP_MAYBE; |
458 | |
459 | return SKIP_NO; |
460 | } |
461 | |
462 | protected: |
463 | unsigned int lookup_props = 0; |
464 | hb_mask_t mask = -1; |
465 | bool ignore_zwnj = false; |
466 | bool ignore_zwj = false; |
467 | bool per_syllable = false; |
468 | uint8_t syllable = 0; |
469 | match_func_t match_func = nullptr; |
470 | const void *match_data = nullptr; |
471 | }; |
472 | |
473 | struct skipping_iterator_t |
474 | { |
475 | void init (hb_ot_apply_context_t *c_, bool context_match = false) |
476 | { |
477 | c = c_; |
478 | end = c->buffer->len; |
479 | match_glyph_data16 = nullptr; |
480 | #ifndef HB_NO_BEYOND_64K |
481 | match_glyph_data24 = nullptr; |
482 | #endif |
483 | matcher.set_match_func (nullptr, nullptr); |
484 | matcher.set_lookup_props (c->lookup_props); |
485 | /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */ |
486 | matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj)); |
487 | /* Ignore ZWJ if we are matching context, or asked to. */ |
488 | matcher.set_ignore_zwj (context_match || c->auto_zwj); |
489 | matcher.set_mask (context_match ? -1 : c->lookup_mask); |
      /* Per-syllable matching is only used for GSUB. */
491 | matcher.set_per_syllable (c->table_index == 0 && c->per_syllable); |
492 | matcher.set_syllable (0); |
493 | } |
494 | void set_lookup_props (unsigned int lookup_props) |
495 | { |
496 | matcher.set_lookup_props (lookup_props); |
497 | } |
498 | void set_match_func (matcher_t::match_func_t match_func_, |
499 | const void *match_data_) |
500 | { |
501 | matcher.set_match_func (match_func_, match_data_); |
502 | } |
503 | void set_glyph_data (const HBUINT16 glyph_data[]) |
504 | { |
505 | match_glyph_data16 = glyph_data; |
506 | #ifndef HB_NO_BEYOND_64K |
507 | match_glyph_data24 = nullptr; |
508 | #endif |
509 | } |
510 | #ifndef HB_NO_BEYOND_64K |
511 | void set_glyph_data (const HBUINT24 glyph_data[]) |
512 | { |
513 | match_glyph_data16 = nullptr; |
514 | match_glyph_data24 = glyph_data; |
515 | } |
516 | #endif |
517 | |
518 | #ifndef HB_OPTIMIZE_SIZE |
519 | HB_ALWAYS_INLINE |
520 | #endif |
521 | void reset (unsigned int start_index_) |
522 | { |
523 | idx = start_index_; |
524 | end = c->buffer->len; |
525 | matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0); |
526 | } |
527 | |
528 | #ifndef HB_OPTIMIZE_SIZE |
529 | HB_ALWAYS_INLINE |
530 | #endif |
531 | void reset_fast (unsigned int start_index_) |
532 | { |
      // Doesn't set end or syllable; used by GPOS, which neither changes nor cares about them.
534 | idx = start_index_; |
535 | } |
536 | |
537 | void reject () |
538 | { |
539 | backup_glyph_data (); |
540 | } |
541 | |
542 | matcher_t::may_skip_t |
543 | #ifndef HB_OPTIMIZE_SIZE |
544 | HB_ALWAYS_INLINE |
545 | #endif |
546 | may_skip (const hb_glyph_info_t &info) const |
547 | { return matcher.may_skip (c, info); } |
548 | |
549 | enum match_t { |
550 | MATCH, |
551 | NOT_MATCH, |
552 | SKIP |
553 | }; |
554 | |
555 | #ifndef HB_OPTIMIZE_SIZE |
556 | HB_ALWAYS_INLINE |
557 | #endif |
558 | match_t match (hb_glyph_info_t &info) |
559 | { |
560 | matcher_t::may_skip_t skip = matcher.may_skip (c, info); |
561 | if (unlikely (skip == matcher_t::SKIP_YES)) |
562 | return SKIP; |
563 | |
564 | matcher_t::may_match_t match = matcher.may_match (info, get_glyph_data ()); |
565 | if (match == matcher_t::MATCH_YES || |
566 | (match == matcher_t::MATCH_MAYBE && |
567 | skip == matcher_t::SKIP_NO)) |
568 | return MATCH; |
569 | |
570 | if (skip == matcher_t::SKIP_NO) |
571 | return NOT_MATCH; |
572 | |
573 | return SKIP; |
574 | } |
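
    /* Combined decision of may_skip and may_match above, as a truth table:
     *
     *   skip         match          result
     *   SKIP_YES     (any)          SKIP
     *   (otherwise)  MATCH_YES      MATCH
     *   SKIP_NO      MATCH_MAYBE    MATCH
     *   SKIP_NO      MATCH_NO       NOT_MATCH
     *   SKIP_MAYBE   not MATCH_YES  SKIP
     */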
575 | |
576 | #ifndef HB_OPTIMIZE_SIZE |
577 | HB_ALWAYS_INLINE |
578 | #endif |
579 | bool next (unsigned *unsafe_to = nullptr) |
580 | { |
581 | const signed stop = (signed) end - 1; |
582 | while ((signed) idx < stop) |
583 | { |
584 | idx++; |
585 | switch (match (c->buffer->info[idx])) |
586 | { |
587 | case MATCH: |
588 | { |
589 | advance_glyph_data (); |
590 | return true; |
591 | } |
592 | case NOT_MATCH: |
593 | { |
594 | if (unsafe_to) |
595 | *unsafe_to = idx + 1; |
596 | return false; |
597 | } |
598 | case SKIP: |
599 | continue; |
600 | } |
601 | } |
602 | if (unsafe_to) |
603 | *unsafe_to = end; |
604 | return false; |
605 | } |
606 | #ifndef HB_OPTIMIZE_SIZE |
607 | HB_ALWAYS_INLINE |
608 | #endif |
609 | bool prev (unsigned *unsafe_from = nullptr) |
610 | { |
611 | const unsigned stop = 0; |
612 | while (idx > stop) |
613 | { |
614 | idx--; |
615 | switch (match (c->buffer->out_info[idx])) |
616 | { |
617 | case MATCH: |
618 | { |
619 | advance_glyph_data (); |
620 | return true; |
621 | } |
622 | case NOT_MATCH: |
623 | { |
624 | if (unsafe_from) |
625 | *unsafe_from = hb_max (1u, idx) - 1u; |
626 | return false; |
627 | } |
628 | case SKIP: |
629 | continue; |
630 | } |
631 | } |
632 | if (unsafe_from) |
633 | *unsafe_from = 0; |
634 | return false; |
635 | } |
636 | |
637 | HB_ALWAYS_INLINE |
638 | hb_codepoint_t |
639 | get_glyph_data () |
640 | { |
641 | if (match_glyph_data16) return *match_glyph_data16; |
642 | #ifndef HB_NO_BEYOND_64K |
643 | else |
644 | if (match_glyph_data24) return *match_glyph_data24; |
645 | #endif |
646 | return 0; |
647 | } |
648 | HB_ALWAYS_INLINE |
649 | void |
650 | advance_glyph_data () |
651 | { |
652 | if (match_glyph_data16) match_glyph_data16++; |
653 | #ifndef HB_NO_BEYOND_64K |
654 | else |
655 | if (match_glyph_data24) match_glyph_data24++; |
656 | #endif |
657 | } |
658 | void |
659 | backup_glyph_data () |
660 | { |
661 | if (match_glyph_data16) match_glyph_data16--; |
662 | #ifndef HB_NO_BEYOND_64K |
663 | else |
664 | if (match_glyph_data24) match_glyph_data24--; |
665 | #endif |
666 | } |
667 | |
668 | unsigned int idx; |
669 | protected: |
670 | hb_ot_apply_context_t *c; |
671 | matcher_t matcher; |
672 | const HBUINT16 *match_glyph_data16; |
673 | #ifndef HB_NO_BEYOND_64K |
674 | const HBUINT24 *match_glyph_data24; |
675 | #endif |
676 | |
677 | unsigned int end; |
678 | }; |
679 | |
680 | |
  const char *get_name () { return "APPLY"; }
682 | typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index); |
683 | template <typename T> |
684 | return_t dispatch (const T &obj) { return obj.apply (this); } |
685 | static return_t default_return_value () { return false; } |
686 | bool stop_sublookup_iteration (return_t r) const { return r; } |
687 | return_t recurse (unsigned int sub_lookup_index) |
688 | { |
689 | if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0)) |
690 | { |
691 | buffer->shaping_failed = true; |
692 | return default_return_value (); |
693 | } |
694 | |
695 | nesting_level_left--; |
696 | bool ret = recurse_func (this, sub_lookup_index); |
697 | nesting_level_left++; |
698 | return ret; |
699 | } |
700 | |
701 | skipping_iterator_t iter_input, iter_context; |
702 | |
703 | unsigned int table_index; /* GSUB/GPOS */ |
704 | hb_font_t *font; |
705 | hb_face_t *face; |
706 | hb_buffer_t *buffer; |
707 | hb_sanitize_context_t sanitizer; |
708 | recurse_func_t recurse_func = nullptr; |
709 | const GDEF &gdef; |
710 | const GDEF::accelerator_t &gdef_accel; |
711 | const VariationStore &var_store; |
712 | VariationStore::cache_t *var_store_cache; |
713 | hb_set_digest_t digest; |
714 | |
715 | hb_direction_t direction; |
716 | hb_mask_t lookup_mask = 1; |
717 | unsigned int lookup_index = (unsigned) -1; |
718 | unsigned int lookup_props = 0; |
719 | unsigned int nesting_level_left = HB_MAX_NESTING_LEVEL; |
720 | |
721 | bool has_glyph_classes; |
722 | bool auto_zwnj = true; |
723 | bool auto_zwj = true; |
724 | bool per_syllable = false; |
725 | bool random = false; |
726 | uint32_t random_state = 1; |
727 | unsigned new_syllables = (unsigned) -1; |
728 | |
  signed last_base = -1; // Used by GPOS
  unsigned last_base_until = 0; // Used by GPOS
731 | |
732 | hb_ot_apply_context_t (unsigned int table_index_, |
733 | hb_font_t *font_, |
734 | hb_buffer_t *buffer_, |
735 | hb_blob_t *table_blob_) : |
736 | table_index (table_index_), |
737 | font (font_), face (font->face), buffer (buffer_), |
738 | sanitizer (table_blob_), |
739 | gdef ( |
740 | #ifndef HB_NO_OT_LAYOUT |
741 | *face->table.GDEF->table |
742 | #else |
743 | Null (GDEF) |
744 | #endif |
745 | ), |
746 | gdef_accel ( |
747 | #ifndef HB_NO_OT_LAYOUT |
748 | *face->table.GDEF |
749 | #else |
750 | Null (GDEF::accelerator_t) |
751 | #endif |
752 | ), |
753 | var_store (gdef.get_var_store ()), |
754 | var_store_cache ( |
755 | #ifndef HB_NO_VAR |
756 | table_index == 1 && font->num_coords ? var_store.create_cache () : nullptr |
757 | #else |
758 | nullptr |
759 | #endif |
760 | ), |
761 | digest (buffer_->digest ()), |
762 | direction (buffer_->props.direction), |
763 | has_glyph_classes (gdef.has_glyph_classes ()) |
764 | { init_iters (); } |
765 | |
766 | ~hb_ot_apply_context_t () |
767 | { |
768 | #ifndef HB_NO_VAR |
769 | VariationStore::destroy_cache (var_store_cache); |
770 | #endif |
771 | } |
772 | |
773 | void init_iters () |
774 | { |
775 | iter_input.init (this, false); |
776 | iter_context.init (this, true); |
777 | } |
778 | |
779 | void set_lookup_mask (hb_mask_t mask, bool init = true) { lookup_mask = mask; last_base = -1; last_base_until = 0; if (init) init_iters (); } |
780 | void set_auto_zwj (bool auto_zwj_, bool init = true) { auto_zwj = auto_zwj_; if (init) init_iters (); } |
781 | void set_auto_zwnj (bool auto_zwnj_, bool init = true) { auto_zwnj = auto_zwnj_; if (init) init_iters (); } |
782 | void set_per_syllable (bool per_syllable_, bool init = true) { per_syllable = per_syllable_; if (init) init_iters (); } |
783 | void set_random (bool random_) { random = random_; } |
784 | void set_recurse_func (recurse_func_t func) { recurse_func = func; } |
785 | void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; } |
786 | void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); } |
787 | |
788 | uint32_t random_number () |
789 | { |
790 | /* http://www.cplusplus.com/reference/random/minstd_rand/ */ |
791 | random_state = random_state * 48271 % 2147483647; |
792 | return random_state; |
793 | } |
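
  /* The above is the MINSTD linear congruential generator:
   * state <- state * 48271 mod (2^31 - 1).  Starting from the default seed
   * of 1, the first outputs are 48271, 182605794, ... */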
794 | |
795 | bool match_properties_mark (hb_codepoint_t glyph, |
796 | unsigned int glyph_props, |
797 | unsigned int match_props) const |
798 | { |
799 | /* If using mark filtering sets, the high short of |
800 | * match_props has the set index. |
801 | */ |
802 | if (match_props & LookupFlag::UseMarkFilteringSet) |
803 | return gdef_accel.mark_set_covers (match_props >> 16, glyph); |
804 | |
    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different from
     * the attachment type specified."
     */
809 | if (match_props & LookupFlag::MarkAttachmentType) |
810 | return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType); |
811 | |
812 | return true; |
813 | } |
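
  /* For reference, the LookupFlag bits used above (per the OpenType spec):
   * 0x0002 IgnoreBaseGlyphs, 0x0004 IgnoreLigatures, 0x0008 IgnoreMarks
   * (together the IgnoreFlags), 0x0010 UseMarkFilteringSet, and 0xFF00
   * MarkAttachmentType.  When UseMarkFilteringSet is set, the high sixteen
   * bits of match_props carry the mark-filtering-set index. */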
814 | |
815 | #ifndef HB_OPTIMIZE_SIZE |
816 | HB_ALWAYS_INLINE |
817 | #endif |
818 | bool check_glyph_property (const hb_glyph_info_t *info, |
819 | unsigned int match_props) const |
820 | { |
821 | unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info); |
822 | |
    /* Not covered if, for example, the glyph class is ligature and
     * match_props includes LookupFlag::IgnoreLigatures.
     */
826 | if (glyph_props & match_props & LookupFlag::IgnoreFlags) |
827 | return false; |
828 | |
829 | if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK)) |
830 | return match_properties_mark (info->codepoint, glyph_props, match_props); |
831 | |
832 | return true; |
833 | } |
834 | |
835 | void _set_glyph_class (hb_codepoint_t glyph_index, |
836 | unsigned int class_guess = 0, |
837 | bool ligature = false, |
838 | bool component = false) |
839 | { |
840 | digest.add (glyph_index); |
841 | |
842 | if (new_syllables != (unsigned) -1) |
843 | buffer->cur().syllable() = new_syllables; |
844 | |
845 | unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur()); |
846 | props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED; |
847 | if (ligature) |
848 | { |
849 | props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED; |
850 | /* In the only place that the MULTIPLIED bit is used, Uniscribe |
851 | * seems to only care about the "last" transformation between |
852 | * Ligature and Multiple substitutions. Ie. if you ligate, expand, |
853 | * and ligate again, it forgives the multiplication and acts as |
854 | * if only ligation happened. As such, clear MULTIPLIED bit. |
855 | */ |
856 | props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED; |
857 | } |
858 | if (component) |
859 | props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED; |
860 | if (likely (has_glyph_classes)) |
861 | { |
862 | props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE; |
863 | _hb_glyph_info_set_glyph_props (&buffer->cur(), props | gdef_accel.get_glyph_props (glyph_index)); |
864 | } |
865 | else if (class_guess) |
866 | { |
867 | props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE; |
868 | _hb_glyph_info_set_glyph_props (&buffer->cur(), props | class_guess); |
869 | } |
870 | else |
871 | _hb_glyph_info_set_glyph_props (&buffer->cur(), props); |
872 | } |
873 | |
874 | void replace_glyph (hb_codepoint_t glyph_index) |
875 | { |
876 | _set_glyph_class (glyph_index); |
877 | (void) buffer->replace_glyph (glyph_index); |
878 | } |
879 | void replace_glyph_inplace (hb_codepoint_t glyph_index) |
880 | { |
881 | _set_glyph_class (glyph_index); |
882 | buffer->cur().codepoint = glyph_index; |
883 | } |
884 | void replace_glyph_with_ligature (hb_codepoint_t glyph_index, |
885 | unsigned int class_guess) |
886 | { |
887 | _set_glyph_class (glyph_index, class_guess, true); |
888 | (void) buffer->replace_glyph (glyph_index); |
889 | } |
890 | void output_glyph_for_component (hb_codepoint_t glyph_index, |
891 | unsigned int class_guess) |
892 | { |
893 | _set_glyph_class (glyph_index, class_guess, false, true); |
894 | (void) buffer->output_glyph (glyph_index); |
895 | } |
896 | }; |
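
/* A rough, hypothetical driving sketch for the context above (the actual
 * driver lives in hb-ot-layout.cc): for each lookup the caller sets the
 * lookup-specific state, then walks the buffer, dispatching at positions
 * the digest doesn't rule out.  Something like:
 *
 *   hb_ot_apply_context_t c (0, font, buffer, gsub_blob); // 0 = GSUB
 *   c.set_recurse_func (recurse);
 *   c.set_lookup_index (lookup_index);
 *   c.set_lookup_mask (mask);
 *   c.set_lookup_props (lookup.get_props ());
 *   while (buffer->idx < buffer->len)
 *     if (c.digest.may_have (buffer->cur().codepoint) &&
 *         lookup.dispatch (&c))
 *       ; // Applied; dispatch advanced buffer->idx past the output.
 *     else
 *       (void) buffer->next_glyph ();
 */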
897 | |
898 | |
899 | struct hb_accelerate_subtables_context_t : |
900 | hb_dispatch_context_t<hb_accelerate_subtables_context_t> |
901 | { |
902 | template <typename Type> |
903 | static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c) |
904 | { |
905 | const Type *typed_obj = (const Type *) obj; |
906 | return typed_obj->apply (c); |
907 | } |
908 | |
909 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
910 | template <typename T> |
911 | static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply_cached (c) ) |
912 | template <typename T> |
913 | static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) ) |
914 | template <typename Type> |
915 | static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c) |
916 | { |
917 | const Type *typed_obj = (const Type *) obj; |
918 | return apply_cached_ (typed_obj, c, hb_prioritize); |
919 | } |
920 | |
921 | template <typename T> |
922 | static inline auto cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) ) |
923 | template <typename T> |
924 | static inline bool cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; } |
925 | template <typename Type> |
926 | static inline bool cache_func_to (const void *obj, hb_ot_apply_context_t *c, bool enter) |
927 | { |
928 | const Type *typed_obj = (const Type *) obj; |
929 | return cache_func_ (typed_obj, c, enter, hb_prioritize); |
930 | } |
931 | #endif |
932 | |
933 | typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c); |
934 | typedef bool (*hb_cache_func_t) (const void *obj, hb_ot_apply_context_t *c, bool enter); |
935 | |
936 | struct hb_applicable_t |
937 | { |
938 | friend struct hb_accelerate_subtables_context_t; |
939 | friend struct hb_ot_layout_lookup_accelerator_t; |
940 | |
941 | template <typename T> |
942 | void init (const T &obj_, |
943 | hb_apply_func_t apply_func_ |
944 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
945 | , hb_apply_func_t apply_cached_func_ |
946 | , hb_cache_func_t cache_func_ |
947 | #endif |
948 | ) |
949 | { |
950 | obj = &obj_; |
951 | apply_func = apply_func_; |
952 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
953 | apply_cached_func = apply_cached_func_; |
954 | cache_func = cache_func_; |
955 | #endif |
956 | digest.init (); |
957 | obj_.get_coverage ().collect_coverage (&digest); |
958 | } |
959 | |
960 | bool apply (hb_ot_apply_context_t *c) const |
961 | { |
962 | return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c); |
963 | } |
964 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
965 | bool apply_cached (hb_ot_apply_context_t *c) const |
966 | { |
967 | return digest.may_have (c->buffer->cur().codepoint) && apply_cached_func (obj, c); |
968 | } |
969 | bool cache_enter (hb_ot_apply_context_t *c) const |
970 | { |
971 | return cache_func (obj, c, true); |
972 | } |
973 | void cache_leave (hb_ot_apply_context_t *c) const |
974 | { |
975 | cache_func (obj, c, false); |
976 | } |
977 | #endif |
978 | |
979 | private: |
980 | const void *obj; |
981 | hb_apply_func_t apply_func; |
982 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
983 | hb_apply_func_t apply_cached_func; |
984 | hb_cache_func_t cache_func; |
985 | #endif |
986 | hb_set_digest_t digest; |
987 | }; |
988 | |
989 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
990 | template <typename T> |
991 | auto cache_cost (const T &obj, hb_priority<1>) HB_AUTO_RETURN ( obj.cache_cost () ) |
992 | template <typename T> |
993 | auto cache_cost (const T &obj, hb_priority<0>) HB_AUTO_RETURN ( 0u ) |
994 | #endif |
995 | |
996 | /* Dispatch interface. */ |
997 | template <typename T> |
998 | return_t dispatch (const T &obj) |
999 | { |
1000 | hb_applicable_t *entry = &array[i++]; |
1001 | |
1002 | entry->init (obj, |
1003 | apply_to<T> |
1004 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
1005 | , apply_cached_to<T> |
1006 | , cache_func_to<T> |
1007 | #endif |
1008 | ); |
1009 | |
1010 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
1011 | /* Cache handling |
1012 | * |
1013 | * We allow one subtable from each lookup to use a cache. The assumption |
1014 | * being that multiple subtables of the same lookup cannot use a cache |
1015 | * because the resources they would use will collide. As such, we ask |
1016 | * each subtable to tell us how much it costs (which a cache would avoid), |
1017 | * and we allocate the cache opportunity to the costliest subtable. |
1018 | */ |
1019 | unsigned cost = cache_cost (obj, hb_prioritize); |
1020 | if (cost > cache_user_cost) |
1021 | { |
1022 | cache_user_idx = i - 1; |
1023 | cache_user_cost = cost; |
1024 | } |
1025 | #endif |
1026 | |
1027 | return hb_empty_t (); |
1028 | } |
1029 | static return_t default_return_value () { return hb_empty_t (); } |
1030 | |
1031 | hb_accelerate_subtables_context_t (hb_applicable_t *array_) : |
1032 | array (array_) {} |
1033 | |
1034 | hb_applicable_t *array; |
1035 | unsigned i = 0; |
1036 | |
1037 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
1038 | unsigned cache_user_idx = (unsigned) -1; |
1039 | unsigned cache_user_cost = 0; |
1040 | #endif |
1041 | }; |
1042 | |
1043 | |
1044 | typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data, void *cache); |
1045 | typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache); |
1046 | typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, unsigned value, const void *data); |
1047 | typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data); |
1048 | |
1049 | struct ContextClosureFuncs |
1050 | { |
1051 | intersects_func_t intersects; |
1052 | intersected_glyphs_func_t intersected_glyphs; |
1053 | }; |
1054 | struct ContextCollectGlyphsFuncs |
1055 | { |
1056 | collect_glyphs_func_t collect; |
1057 | }; |
1058 | struct ContextApplyFuncs |
1059 | { |
1060 | match_func_t match; |
1061 | }; |
1062 | struct ChainContextApplyFuncs |
1063 | { |
1064 | match_func_t match[3]; |
1065 | }; |
1066 | |
1067 | |
1068 | static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED, void *cache HB_UNUSED) |
1069 | { |
1070 | return glyphs->has (value); |
1071 | } |
1072 | static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data, void *cache) |
1073 | { |
1074 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
1075 | hb_map_t *map = (hb_map_t *) cache; |
1076 | |
1077 | hb_codepoint_t *cached_v; |
1078 | if (map->has (value, &cached_v)) |
1079 | return *cached_v; |
1080 | |
1081 | bool v = class_def.intersects_class (glyphs, value); |
1082 | map->set (value, v); |
1083 | |
1084 | return v; |
1085 | } |
1086 | static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data, void *cache HB_UNUSED) |
1087 | { |
1088 | Offset16To<Coverage> coverage; |
1089 | coverage = value; |
1090 | return (data+coverage).intersects (glyphs); |
1091 | } |
1092 | |
1093 | |
static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache HB_UNUSED)
1095 | { |
1096 | unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value]; |
1097 | intersected_glyphs->add (g); |
1098 | } |
1099 | |
1100 | using intersected_class_cache_t = hb_hashmap_t<unsigned, hb_set_t>; |
1101 | |
1102 | static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache) |
1103 | { |
1104 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
1105 | |
1106 | intersected_class_cache_t *map = (intersected_class_cache_t *) cache; |
1107 | |
1108 | hb_set_t *cached_v; |
1109 | if (map->has (value, &cached_v)) |
1110 | { |
1111 | intersected_glyphs->union_ (*cached_v); |
1112 | return; |
1113 | } |
1114 | |
1115 | hb_set_t v; |
1116 | class_def.intersected_class_glyphs (glyphs, value, &v); |
1117 | |
1118 | intersected_glyphs->union_ (v); |
1119 | |
1120 | map->set (value, std::move (v)); |
1121 | } |
1122 | |
static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache HB_UNUSED)
1124 | { |
1125 | Offset16To<Coverage> coverage; |
1126 | coverage = value; |
1127 | (data+coverage).intersect_set (*glyphs, *intersected_glyphs); |
1128 | } |
1129 | |
1130 | |
1131 | template <typename HBUINT> |
1132 | static inline bool array_is_subset_of (const hb_set_t *glyphs, |
1133 | unsigned int count, |
1134 | const HBUINT values[], |
1135 | intersects_func_t intersects_func, |
1136 | const void *intersects_data, |
1137 | void *cache) |
1138 | { |
1139 | for (const auto &_ : + hb_iter (values, count)) |
1140 | if (!intersects_func (glyphs, _, intersects_data, cache)) return false; |
1141 | return true; |
1142 | } |
1143 | |
1144 | |
1145 | static inline void collect_glyph (hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED) |
1146 | { |
1147 | glyphs->add (value); |
1148 | } |
1149 | static inline void collect_class (hb_set_t *glyphs, unsigned value, const void *data) |
1150 | { |
1151 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
1152 | class_def.collect_class (glyphs, value); |
1153 | } |
1154 | static inline void collect_coverage (hb_set_t *glyphs, unsigned value, const void *data) |
1155 | { |
1156 | Offset16To<Coverage> coverage; |
1157 | coverage = value; |
1158 | (data+coverage).collect_coverage (glyphs); |
1159 | } |
1160 | template <typename HBUINT> |
1161 | static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED, |
1162 | hb_set_t *glyphs, |
1163 | unsigned int count, |
1164 | const HBUINT values[], |
1165 | collect_glyphs_func_t collect_func, |
1166 | const void *collect_data) |
1167 | { |
1168 | return |
1169 | + hb_iter (values, count) |
1170 | | hb_apply ([&] (const HBUINT &_) { collect_func (glyphs, _, collect_data); }) |
1171 | ; |
1172 | } |
1173 | |
1174 | |
1175 | static inline bool match_always (hb_glyph_info_t &info HB_UNUSED, unsigned value HB_UNUSED, const void *data HB_UNUSED) |
1176 | { |
1177 | return true; |
1178 | } |
1179 | static inline bool match_glyph (hb_glyph_info_t &info, unsigned value, const void *data HB_UNUSED) |
1180 | { |
1181 | return info.codepoint == value; |
1182 | } |
1183 | static inline bool match_class (hb_glyph_info_t &info, unsigned value, const void *data) |
1184 | { |
1185 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
1186 | return class_def.get_class (info.codepoint) == value; |
1187 | } |
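
/* The cached variants below stash the computed class in the glyph's syllable
 * byte, repurposing it as a small cache (callers only enable these variants
 * when the syllable values are not otherwise in use): match_class_cached
 * uses the whole byte (255 meaning "not cached yet"), while
 * match_class_cached1/match_class_cached2 split it into the low and high
 * nibbles (15 meaning "not cached") so two different ClassDefs can be
 * cached side by side. */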
1188 | static inline bool match_class_cached (hb_glyph_info_t &info, unsigned value, const void *data) |
1189 | { |
1190 | unsigned klass = info.syllable(); |
1191 | if (klass < 255) |
1192 | return klass == value; |
1193 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
1194 | klass = class_def.get_class (info.codepoint); |
1195 | if (likely (klass < 255)) |
1196 | info.syllable() = klass; |
1197 | return klass == value; |
1198 | } |
1199 | static inline bool match_class_cached1 (hb_glyph_info_t &info, unsigned value, const void *data) |
1200 | { |
1201 | unsigned klass = info.syllable() & 0x0F; |
1202 | if (klass < 15) |
1203 | return klass == value; |
1204 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
1205 | klass = class_def.get_class (info.codepoint); |
1206 | if (likely (klass < 15)) |
1207 | info.syllable() = (info.syllable() & 0xF0) | klass; |
1208 | return klass == value; |
1209 | } |
1210 | static inline bool match_class_cached2 (hb_glyph_info_t &info, unsigned value, const void *data) |
1211 | { |
1212 | unsigned klass = (info.syllable() & 0xF0) >> 4; |
1213 | if (klass < 15) |
1214 | return klass == value; |
1215 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
1216 | klass = class_def.get_class (info.codepoint); |
1217 | if (likely (klass < 15)) |
1218 | info.syllable() = (info.syllable() & 0x0F) | (klass << 4); |
1219 | return klass == value; |
1220 | } |
1221 | static inline bool match_coverage (hb_glyph_info_t &info, unsigned value, const void *data) |
1222 | { |
1223 | Offset16To<Coverage> coverage; |
1224 | coverage = value; |
1225 | return (data+coverage).get_coverage (info.codepoint) != NOT_COVERED; |
1226 | } |
1227 | |
1228 | template <typename HBUINT> |
1229 | static inline bool would_match_input (hb_would_apply_context_t *c, |
1230 | unsigned int count, /* Including the first glyph (not matched) */ |
1231 | const HBUINT input[], /* Array of input values--start with second glyph */ |
1232 | match_func_t match_func, |
1233 | const void *match_data) |
1234 | { |
1235 | if (count != c->len) |
1236 | return false; |
1237 | |
1238 | for (unsigned int i = 1; i < count; i++) |
1239 | { |
1240 | hb_glyph_info_t info; |
1241 | info.codepoint = c->glyphs[i]; |
1242 | if (likely (!match_func (info, input[i - 1], match_data))) |
1243 | return false; |
1244 | } |
1245 | |
1246 | return true; |
1247 | } |
1248 | template <typename HBUINT> |
1249 | #ifndef HB_OPTIMIZE_SIZE |
1250 | HB_ALWAYS_INLINE |
1251 | #endif |
1252 | static bool match_input (hb_ot_apply_context_t *c, |
1253 | unsigned int count, /* Including the first glyph (not matched) */ |
1254 | const HBUINT input[], /* Array of input values--start with second glyph */ |
1255 | match_func_t match_func, |
1256 | const void *match_data, |
1257 | unsigned int *end_position, |
1258 | unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], |
1259 | unsigned int *p_total_component_count = nullptr) |
1260 | { |
1261 | TRACE_APPLY (nullptr); |
1262 | |
1263 | if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false); |
1264 | |
1265 | hb_buffer_t *buffer = c->buffer; |
1266 | |
1267 | hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input; |
1268 | skippy_iter.reset (buffer->idx); |
1269 | skippy_iter.set_match_func (match_func, match_data); |
1270 | skippy_iter.set_glyph_data (input); |
1271 | |
1272 | /* |
1273 | * This is perhaps the trickiest part of OpenType... Remarks: |
1274 | * |
1275 | * - If all components of the ligature were marks, we call this a mark ligature. |
1276 | * |
1277 | * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize |
1278 | * it as a ligature glyph. |
1279 | * |
1280 | * - Ligatures cannot be formed across glyphs attached to different components |
1281 | * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and |
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
1283 | * However, it would be wrong to ligate that SHADDA,FATHA sequence. |
1284 | * There are a couple of exceptions to this: |
1285 | * |
   * o If a ligature tries ligating with marks that belong to the ligature
   *   itself, go ahead, assuming that the font designer knows what they are
   *   doing (otherwise it can break Indic stuff when a matra wants to ligate
   *   with a conjunct).
1289 | * |
1290 | * o If two marks want to ligate and they belong to different components of the |
1291 | * same ligature glyph, and said ligature glyph is to be ignored according to |
1292 | * mark-filtering rules, then allow. |
1293 | * https://github.com/harfbuzz/harfbuzz/issues/545 |
1294 | */ |
1295 | |
1296 | unsigned int total_component_count = 0; |
1297 | |
1298 | unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); |
1299 | unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); |
1300 | |
1301 | enum { |
1302 | LIGBASE_NOT_CHECKED, |
1303 | LIGBASE_MAY_NOT_SKIP, |
1304 | LIGBASE_MAY_SKIP |
1305 | } ligbase = LIGBASE_NOT_CHECKED; |
1306 | |
1307 | for (unsigned int i = 1; i < count; i++) |
1308 | { |
1309 | unsigned unsafe_to; |
1310 | if (!skippy_iter.next (&unsafe_to)) |
1311 | { |
1312 | *end_position = unsafe_to; |
1313 | return_trace (false); |
1314 | } |
1315 | |
1316 | match_positions[i] = skippy_iter.idx; |
1317 | |
1318 | unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]); |
1319 | unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]); |
1320 | |
1321 | if (first_lig_id && first_lig_comp) |
1322 | { |
1323 | /* If first component was attached to a previous ligature component, |
1324 | * all subsequent components should be attached to the same ligature |
1325 | * component, otherwise we shouldn't ligate them... */ |
1326 | if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp) |
1327 | { |
1328 | /* ...unless, we are attached to a base ligature and that base |
1329 | * ligature is ignorable. */ |
1330 | if (ligbase == LIGBASE_NOT_CHECKED) |
1331 | { |
1332 | bool found = false; |
1333 | const auto *out = buffer->out_info; |
1334 | unsigned int j = buffer->out_len; |
1335 | while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id) |
1336 | { |
1337 | if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0) |
1338 | { |
1339 | j--; |
1340 | found = true; |
1341 | break; |
1342 | } |
1343 | j--; |
1344 | } |
1345 | |
1346 | if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES) |
1347 | ligbase = LIGBASE_MAY_SKIP; |
1348 | else |
1349 | ligbase = LIGBASE_MAY_NOT_SKIP; |
1350 | } |
1351 | |
1352 | if (ligbase == LIGBASE_MAY_NOT_SKIP) |
1353 | return_trace (false); |
1354 | } |
1355 | } |
1356 | else |
1357 | { |
1358 | /* If first component was NOT attached to a previous ligature component, |
1359 | * all subsequent components should also NOT be attached to any ligature |
1360 | * component, unless they are attached to the first component itself! */ |
1361 | if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id)) |
1362 | return_trace (false); |
1363 | } |
1364 | |
1365 | total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]); |
1366 | } |
1367 | |
1368 | *end_position = skippy_iter.idx + 1; |
1369 | |
1370 | if (p_total_component_count) |
1371 | { |
1372 | total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur()); |
1373 | *p_total_component_count = total_component_count; |
1374 | } |
1375 | |
1376 | match_positions[0] = buffer->idx; |
1377 | |
1378 | return_trace (true); |
1379 | } |
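
/* On success, match_positions[0..count-1] hold the buffer indices of the
 * matched glyphs (match_positions[0] == buffer->idx) and *end_position is
 * one past the last matched glyph; on failure, *end_position is set so the
 * caller can flag the range up to it as unsafe, for unsafe-to-break /
 * unsafe-to-concat tracking. */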
1380 | static inline bool ligate_input (hb_ot_apply_context_t *c, |
1381 | unsigned int count, /* Including the first glyph */ |
1382 | const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */ |
1383 | unsigned int match_end, |
1384 | hb_codepoint_t lig_glyph, |
1385 | unsigned int total_component_count) |
1386 | { |
1387 | TRACE_APPLY (nullptr); |
1388 | |
1389 | hb_buffer_t *buffer = c->buffer; |
1390 | |
1391 | buffer->merge_clusters (buffer->idx, match_end); |
1392 | |
1393 | /* - If a base and one or more marks ligate, consider that as a base, NOT |
1394 | * ligature, such that all following marks can still attach to it. |
1395 | * https://github.com/harfbuzz/harfbuzz/issues/1109 |
1396 | * |
1397 | * - If all components of the ligature were marks, we call this a mark ligature. |
1398 | * If it *is* a mark ligature, we don't allocate a new ligature id, and leave |
1399 | * the ligature to keep its old ligature id. This will allow it to attach to |
1400 | * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH, |
   *   and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
1402 | * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature |
1403 | * later, we don't want them to lose their ligature id/component, otherwise |
1404 | * GPOS will fail to correctly position the mark ligature on top of the |
1405 | * LAM,LAM,HEH ligature. See: |
1406 | * https://bugzilla.gnome.org/show_bug.cgi?id=676343 |
1407 | * |
   * - If a ligature is formed of components, some of which are themselves
   *   ligatures, and those component ligatures had marks attached to *their*
1410 | * components, we have to attach the marks to the new ligature component |
1411 | * positions! Now *that*'s tricky! And these marks may be following the |
1412 | * last component of the whole sequence, so we should loop forward looking |
1413 | * for them and update them. |
1414 | * |
1415 | * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a |
1416 | * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature |
1417 | * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature |
1418 | * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to |
1419 | * the new ligature with a component value of 2. |
1420 | * |
1421 | * This in fact happened to a font... See: |
1422 | * https://bugzilla.gnome.org/show_bug.cgi?id=437633 |
1423 | */ |
1424 | |
1425 | bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]); |
1426 | bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]); |
1427 | for (unsigned int i = 1; i < count; i++) |
1428 | if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]])) |
1429 | { |
1430 | is_base_ligature = false; |
1431 | is_mark_ligature = false; |
1432 | break; |
1433 | } |
1434 | bool is_ligature = !is_base_ligature && !is_mark_ligature; |
1435 | |
1436 | unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0; |
1437 | unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0; |
1438 | unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); |
1439 | unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur()); |
1440 | unsigned int components_so_far = last_num_components; |
1441 | |
1442 | if (is_ligature) |
1443 | { |
1444 | _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count); |
1445 | if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK) |
1446 | { |
1447 | _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER); |
1448 | } |
1449 | } |
1450 | c->replace_glyph_with_ligature (lig_glyph, klass); |
1451 | |
1452 | for (unsigned int i = 1; i < count; i++) |
1453 | { |
1454 | while (buffer->idx < match_positions[i] && buffer->successful) |
1455 | { |
1456 | if (is_ligature) |
1457 | { |
1458 | unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); |
1459 | if (this_comp == 0) |
1460 | this_comp = last_num_components; |
1461 | unsigned int new_lig_comp = components_so_far - last_num_components + |
1462 | hb_min (this_comp, last_num_components); |
1463 | _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp); |
1464 | } |
1465 | (void) buffer->next_glyph (); |
1466 | } |
1467 | |
1468 | last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); |
1469 | last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur()); |
1470 | components_so_far += last_num_components; |
1471 | |
1472 | /* Skip the base glyph */ |
1473 | buffer->idx++; |
1474 | } |
1475 | |
1476 | if (!is_mark_ligature && last_lig_id) |
1477 | { |
1478 | /* Re-adjust components for any marks following. */ |
1479 | for (unsigned i = buffer->idx; i < buffer->len; ++i) |
1480 | { |
1481 | if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break; |
1482 | |
1483 | unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]); |
1484 | if (!this_comp) break; |
1485 | |
1486 | unsigned new_lig_comp = components_so_far - last_num_components + |
1487 | hb_min (this_comp, last_num_components); |
1488 | _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp); |
1489 | } |
1490 | } |
1491 | return_trace (true); |
1492 | } |
1493 | |
1494 | template <typename HBUINT> |
1495 | #ifndef HB_OPTIMIZE_SIZE |
1496 | HB_ALWAYS_INLINE |
1497 | #endif |
1498 | static bool match_backtrack (hb_ot_apply_context_t *c, |
1499 | unsigned int count, |
1500 | const HBUINT backtrack[], |
1501 | match_func_t match_func, |
1502 | const void *match_data, |
1503 | unsigned int *match_start) |
1504 | { |
1505 | TRACE_APPLY (nullptr); |
1506 | |
1507 | hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; |
1508 | skippy_iter.reset (c->buffer->backtrack_len ()); |
1509 | skippy_iter.set_match_func (match_func, match_data); |
1510 | skippy_iter.set_glyph_data (backtrack); |
1511 | |
1512 | for (unsigned int i = 0; i < count; i++) |
1513 | { |
1514 | unsigned unsafe_from; |
1515 | if (!skippy_iter.prev (&unsafe_from)) |
1516 | { |
1517 | *match_start = unsafe_from; |
1518 | return_trace (false); |
1519 | } |
1520 | } |
1521 | |
1522 | *match_start = skippy_iter.idx; |
1523 | return_trace (true); |
1524 | } |
1525 | |
1526 | template <typename HBUINT> |
1527 | #ifndef HB_OPTIMIZE_SIZE |
1528 | HB_ALWAYS_INLINE |
1529 | #endif |
1530 | static bool match_lookahead (hb_ot_apply_context_t *c, |
1531 | unsigned int count, |
1532 | const HBUINT lookahead[], |
1533 | match_func_t match_func, |
1534 | const void *match_data, |
1535 | unsigned int start_index, |
1536 | unsigned int *end_index) |
1537 | { |
1538 | TRACE_APPLY (nullptr); |
1539 | |
1540 | hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; |
1541 | skippy_iter.reset (start_index - 1); |
1542 | skippy_iter.set_match_func (match_func, match_data); |
1543 | skippy_iter.set_glyph_data (lookahead); |
1544 | |
1545 | for (unsigned int i = 0; i < count; i++) |
1546 | { |
1547 | unsigned unsafe_to; |
1548 | if (!skippy_iter.next (&unsafe_to)) |
1549 | { |
1550 | *end_index = unsafe_to; |
1551 | return_trace (false); |
1552 | } |
1553 | } |
1554 | |
1555 | *end_index = skippy_iter.idx + 1; |
1556 | return_trace (true); |
1557 | } |
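
/* Buffer geometry during (chain-)context matching, as a sketch:
 *
 *   out_info: [ ... backtrack ... ]                (walked in reverse by prev())
 *   info:     [ cur | input ... | lookahead ... ]  (walked forward by next())
 *
 * match_backtrack starts at backtrack_len() and walks backwards over the
 * already-output glyphs; match_lookahead starts at start_index - 1 (the end
 * of the matched input) and walks forward over the upcoming glyphs. */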
1558 | |
1559 | |
1560 | |
1561 | struct LookupRecord |
1562 | { |
1563 | bool serialize (hb_serialize_context_t *c, |
1564 | const hb_map_t *lookup_map) const |
1565 | { |
1566 | TRACE_SERIALIZE (this); |
1567 | auto *out = c->embed (*this); |
1568 | if (unlikely (!out)) return_trace (false); |
1569 | |
1570 | return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW)); |
1571 | } |
1572 | |
1573 | bool sanitize (hb_sanitize_context_t *c) const |
1574 | { |
1575 | TRACE_SANITIZE (this); |
1576 | return_trace (c->check_struct (this)); |
1577 | } |
1578 | |
1579 | HBUINT16 sequenceIndex; /* Index into current glyph |
1580 | * sequence--first glyph = 0 */ |
  HBUINT16	lookupListIndex;	/* Lookup to apply to that
					 * position--zero-based */
1583 | public: |
1584 | DEFINE_SIZE_STATIC (4); |
1585 | }; |
1586 | |
1587 | static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c, |
1588 | const hb_array_t<const LookupRecord> lookupRecords, |
1589 | const hb_map_t *lookup_map) |
1590 | { |
1591 | unsigned count = 0; |
1592 | for (const LookupRecord& r : lookupRecords) |
1593 | { |
1594 | if (!lookup_map->has (r.lookupListIndex)) |
1595 | continue; |
1596 | |
1597 | if (!r.serialize (c, lookup_map)) |
1598 | return 0; |
1599 | |
1600 | count++; |
1601 | } |
1602 | return count; |
1603 | } |
1604 | |
1605 | enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 }; |
1606 | |
1607 | template <typename HBUINT> |
1608 | static void context_closure_recurse_lookups (hb_closure_context_t *c, |
1609 | unsigned inputCount, const HBUINT input[], |
1610 | unsigned lookupCount, |
1611 | const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */, |
1612 | unsigned value, |
1613 | ContextFormat context_format, |
1614 | const void *data, |
1615 | intersected_glyphs_func_t intersected_glyphs_func, |
1616 | void *cache) |
1617 | { |
1618 | hb_set_t covered_seq_indicies; |
1619 | hb_set_t pos_glyphs; |
1620 | for (unsigned int i = 0; i < lookupCount; i++) |
1621 | { |
1622 | unsigned seqIndex = lookupRecord[i].sequenceIndex; |
1623 | if (seqIndex >= inputCount) continue; |
1624 | |
1625 | bool has_pos_glyphs = false; |
1626 | |
1627 | if (!covered_seq_indicies.has (seqIndex)) |
1628 | { |
1629 | has_pos_glyphs = true; |
1630 | pos_glyphs.clear (); |
1631 | if (seqIndex == 0) |
1632 | { |
1633 | switch (context_format) { |
1634 | case ContextFormat::SimpleContext: |
1635 | pos_glyphs.add (value); |
1636 | break; |
1637 | case ContextFormat::ClassBasedContext: |
1638 | intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs, cache); |
1639 | break; |
1640 | case ContextFormat::CoverageBasedContext: |
1641 | pos_glyphs.set (c->parent_active_glyphs ()); |
1642 | break; |
1643 | } |
1644 | } |
1645 | else |
1646 | { |
1647 | const void *input_data = input; |
1648 | unsigned input_value = seqIndex - 1; |
1649 | if (context_format != ContextFormat::SimpleContext) |
1650 | { |
1651 | input_data = data; |
1652 | input_value = input[seqIndex - 1]; |
1653 | } |
1654 | |
1655 | intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs, cache); |
1656 | } |
1657 | } |
1658 | |
1659 | covered_seq_indicies.add (seqIndex); |
1660 | hb_set_t *cur_active_glyphs = c->push_cur_active_glyphs (); |
1661 | if (unlikely (!cur_active_glyphs)) |
1662 | return; |
1663 | if (has_pos_glyphs) { |
1664 | *cur_active_glyphs = std::move (pos_glyphs); |
1665 | } else { |
1666 | *cur_active_glyphs = *c->glyphs; |
1667 | } |
1668 | |
1669 | unsigned endIndex = inputCount; |
1670 | if (context_format == ContextFormat::CoverageBasedContext) |
1671 | endIndex += 1; |
1672 | |
1673 | c->recurse (lookupRecord[i].lookupListIndex, &covered_seq_indicies, seqIndex, endIndex); |
1674 | |
1675 | c->pop_cur_done_glyphs (); |
1676 | } |
1677 | } |
1678 | |
1679 | template <typename context_t> |
1680 | static inline void recurse_lookups (context_t *c, |
1681 | unsigned int lookupCount, |
1682 | const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */) |
1683 | { |
1684 | for (unsigned int i = 0; i < lookupCount; i++) |
1685 | c->recurse (lookupRecord[i].lookupListIndex); |
1686 | } |
1687 | |
1688 | static inline void apply_lookup (hb_ot_apply_context_t *c, |
1689 | unsigned int count, /* Including the first glyph */ |
1690 | unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */ |
1691 | unsigned int lookupCount, |
1692 | const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ |
1693 | unsigned int match_end) |
1694 | { |
1695 | hb_buffer_t *buffer = c->buffer; |
1696 | int end; |
1697 | |
  /* All positions are distances from the beginning of the *output* buffer.
   * Adjust. */
1700 | { |
1701 | unsigned int bl = buffer->backtrack_len (); |
1702 | end = bl + match_end - buffer->idx; |
1703 | |
1704 | int delta = bl - buffer->idx; |
1705 | /* Convert positions to new indexing. */ |
1706 | for (unsigned int j = 0; j < count; j++) |
1707 | match_positions[j] += delta; |
1708 | } |
1709 | |
1710 | for (unsigned int i = 0; i < lookupCount && buffer->successful; i++) |
1711 | { |
1712 | unsigned int idx = lookupRecord[i].sequenceIndex; |
1713 | if (idx >= count) |
1714 | continue; |
1715 | |
1716 | unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len (); |
1717 | |
1718 | /* This can happen if earlier recursed lookups deleted many entries. */ |
1719 | if (unlikely (match_positions[idx] >= orig_len)) |
1720 | continue; |
1721 | |
1722 | if (unlikely (!buffer->move_to (match_positions[idx]))) |
1723 | break; |
1724 | |
1725 | if (unlikely (buffer->max_ops <= 0)) |
1726 | break; |
1727 | |
1728 | if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ()) |
1729 | { |
1730 | if (buffer->have_output) |
1731 | c->buffer->sync_so_far (); |
1732 | c->buffer->message (c->font, |
1733 | "recursing to lookup %u at %u" , |
1734 | (unsigned) lookupRecord[i].lookupListIndex, |
1735 | buffer->idx); |
1736 | } |
1737 | |
1738 | if (!c->recurse (lookupRecord[i].lookupListIndex)) |
1739 | continue; |
1740 | |
1741 | if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ()) |
1742 | { |
1743 | if (buffer->have_output) |
1744 | c->buffer->sync_so_far (); |
1745 | c->buffer->message (c->font, |
1746 | "recursed to lookup %u" , |
1747 | (unsigned) lookupRecord[i].lookupListIndex); |
1748 | } |
1749 | |
1750 | unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len (); |
1751 | int delta = new_len - orig_len; |
1752 | |
1753 | if (!delta) |
1754 | continue; |
1755 | |
    /* Recursed lookup changed buffer len.  Adjust.
     *
     * TODO:
     *
     * Right now, if buffer length increased by n, we assume n new glyphs
     * were added right after the current position, and if buffer length
     * was decreased by n, we assume n match positions after the current
     * one were removed.  The former (buffer length increased) case is
     * fine, but the decrease case can be improved in at least two ways,
     * both of which are significant:
     *
     *   - If the recursed-to lookup is MultipleSubst and buffer length
     *     decreased, then it's the current match position that was
     *     deleted, NOT the one after it.
     *
     *   - If buffer length was decreased by n, it does not necessarily
     *     mean that n match positions were removed, as the recursed-to
     *     lookup might have had a different LookupFlag.  Here's a
     *     constructed case of that:
     *     https://github.com/harfbuzz/harfbuzz/discussions/3538
     *
     * It should be possible to construct tests for both of these cases.
     */
1779 | |
1780 | end += delta; |
1781 | if (end < int (match_positions[idx])) |
1782 | { |
      /* End might end up being smaller than match_positions[idx] if the recursed
       * lookup ended up removing many items.
       * Just never rewind end beyond the start of the current position, since
       * that is not possible in the recursed lookup.  Adjust delta accordingly.
       *
       * https://bugs.chromium.org/p/chromium/issues/detail?id=659496
       * https://github.com/harfbuzz/harfbuzz/issues/1611
       */
1791 | delta += match_positions[idx] - end; |
1792 | end = match_positions[idx]; |
1793 | } |
1794 | |
    unsigned int next = idx + 1; /* next is now the position after the recursed lookup. */
1796 | |
1797 | if (delta > 0) |
1798 | { |
1799 | if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH)) |
1800 | break; |
1801 | } |
1802 | else |
1803 | { |
1804 | /* NOTE: delta is non-positive. */ |
1805 | delta = hb_max (delta, (int) next - (int) count); |
1806 | next -= delta; |
1807 | } |
1808 | |
1809 | /* Shift! */ |
1810 | memmove (match_positions + next + delta, match_positions + next, |
1811 | (count - next) * sizeof (match_positions[0])); |
1812 | next += delta; |
1813 | count += delta; |
1814 | |
1815 | /* Fill in new entries. */ |
1816 | for (unsigned int j = idx + 1; j < next; j++) |
1817 | match_positions[j] = match_positions[j - 1] + 1; |
1818 | |
1819 | /* And fixup the rest. */ |
1820 | for (; next < count; next++) |
1821 | match_positions[next] += delta; |
1822 | } |
1823 | |
1824 | (void) buffer->move_to (end); |
1825 | } |
1826 | |
1827 | |
1828 | |
1829 | /* Contextual lookups */ |
1830 | |
1831 | struct ContextClosureLookupContext |
1832 | { |
1833 | ContextClosureFuncs funcs; |
1834 | ContextFormat context_format; |
1835 | const void *intersects_data; |
1836 | void *intersects_cache; |
1837 | void *intersected_glyphs_cache; |
1838 | }; |
1839 | |
1840 | struct ContextCollectGlyphsLookupContext |
1841 | { |
1842 | ContextCollectGlyphsFuncs funcs; |
1843 | const void *collect_data; |
1844 | }; |
1845 | |
1846 | struct ContextApplyLookupContext |
1847 | { |
1848 | ContextApplyFuncs funcs; |
1849 | const void *match_data; |
1850 | }; |
1851 | |
1852 | template <typename HBUINT> |
1853 | static inline bool context_intersects (const hb_set_t *glyphs, |
1854 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1855 | const HBUINT input[], /* Array of input values--start with second glyph */ |
1856 | ContextClosureLookupContext &lookup_context) |
1857 | { |
1858 | return array_is_subset_of (glyphs, |
1859 | inputCount ? inputCount - 1 : 0, input, |
1860 | lookup_context.funcs.intersects, |
1861 | lookup_context.intersects_data, |
1862 | lookup_context.intersects_cache); |
1863 | } |
1864 | |
1865 | template <typename HBUINT> |
1866 | static inline void context_closure_lookup (hb_closure_context_t *c, |
1867 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1868 | const HBUINT input[], /* Array of input values--start with second glyph */ |
1869 | unsigned int lookupCount, |
1870 | const LookupRecord lookupRecord[], |
1871 | unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */ |
1872 | ContextClosureLookupContext &lookup_context) |
1873 | { |
1874 | if (context_intersects (c->glyphs, |
1875 | inputCount, input, |
1876 | lookup_context)) |
1877 | context_closure_recurse_lookups (c, |
1878 | inputCount, input, |
1879 | lookupCount, lookupRecord, |
1880 | value, |
1881 | lookup_context.context_format, |
1882 | lookup_context.intersects_data, |
1883 | lookup_context.funcs.intersected_glyphs, |
1884 | lookup_context.intersected_glyphs_cache); |
1885 | } |
1886 | |
1887 | template <typename HBUINT> |
1888 | static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c, |
1889 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1890 | const HBUINT input[], /* Array of input values--start with second glyph */ |
1891 | unsigned int lookupCount, |
1892 | const LookupRecord lookupRecord[], |
1893 | ContextCollectGlyphsLookupContext &lookup_context) |
1894 | { |
1895 | collect_array (c, c->input, |
1896 | inputCount ? inputCount - 1 : 0, input, |
1897 | lookup_context.funcs.collect, lookup_context.collect_data); |
1898 | recurse_lookups (c, |
1899 | lookupCount, lookupRecord); |
1900 | } |
1901 | |
1902 | template <typename HBUINT> |
1903 | static inline bool context_would_apply_lookup (hb_would_apply_context_t *c, |
1904 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1905 | const HBUINT input[], /* Array of input values--start with second glyph */ |
1906 | unsigned int lookupCount HB_UNUSED, |
1907 | const LookupRecord lookupRecord[] HB_UNUSED, |
1908 | const ContextApplyLookupContext &lookup_context) |
1909 | { |
1910 | return would_match_input (c, |
1911 | inputCount, input, |
1912 | lookup_context.funcs.match, lookup_context.match_data); |
1913 | } |
1914 | |
1915 | template <typename HBUINT> |
1916 | HB_ALWAYS_INLINE |
1917 | static bool context_apply_lookup (hb_ot_apply_context_t *c, |
1918 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1919 | const HBUINT input[], /* Array of input values--start with second glyph */ |
1920 | unsigned int lookupCount, |
1921 | const LookupRecord lookupRecord[], |
1922 | const ContextApplyLookupContext &lookup_context) |
1923 | { |
1924 | unsigned match_end = 0; |
1925 | unsigned match_positions[HB_MAX_CONTEXT_LENGTH]; |
1926 | if (match_input (c, |
1927 | inputCount, input, |
1928 | lookup_context.funcs.match, lookup_context.match_data, |
1929 | &match_end, match_positions)) |
1930 | { |
1931 | c->buffer->unsafe_to_break (c->buffer->idx, match_end); |
1932 | apply_lookup (c, |
1933 | inputCount, match_positions, |
1934 | lookupCount, lookupRecord, |
1935 | match_end); |
1936 | return true; |
1937 | } |
1938 | else |
1939 | { |
1940 | c->buffer->unsafe_to_concat (c->buffer->idx, match_end); |
1941 | return false; |
1942 | } |
1943 | } |
1944 | |
1945 | template <typename Types> |
1946 | struct Rule |
1947 | { |
1948 | template <typename T> |
1949 | friend struct RuleSet; |
1950 | |
1951 | bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const |
1952 | { |
1953 | return context_intersects (glyphs, |
1954 | inputCount, inputZ.arrayZ, |
1955 | lookup_context); |
1956 | } |
1957 | |
1958 | void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const |
1959 | { |
1960 | if (unlikely (c->lookup_limit_exceeded ())) return; |
1961 | |
1962 | const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> |
      (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1964 | context_closure_lookup (c, |
1965 | inputCount, inputZ.arrayZ, |
1966 | lookupCount, lookupRecord.arrayZ, |
1967 | value, lookup_context); |
1968 | } |
1969 | |
1970 | void closure_lookups (hb_closure_lookups_context_t *c, |
1971 | ContextClosureLookupContext &lookup_context) const |
1972 | { |
1973 | if (unlikely (c->lookup_limit_exceeded ())) return; |
1974 | if (!intersects (c->glyphs, lookup_context)) return; |
1975 | |
1976 | const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> |
1977 | (inputZ.as_array (inputCount ? inputCount - 1 : 0)); |
1978 | recurse_lookups (c, lookupCount, lookupRecord.arrayZ); |
1979 | } |
1980 | |
1981 | void collect_glyphs (hb_collect_glyphs_context_t *c, |
1982 | ContextCollectGlyphsLookupContext &lookup_context) const |
1983 | { |
1984 | const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> |
1985 | (inputZ.as_array (inputCount ? inputCount - 1 : 0)); |
1986 | context_collect_glyphs_lookup (c, |
1987 | inputCount, inputZ.arrayZ, |
1988 | lookupCount, lookupRecord.arrayZ, |
1989 | lookup_context); |
1990 | } |
1991 | |
1992 | bool would_apply (hb_would_apply_context_t *c, |
1993 | const ContextApplyLookupContext &lookup_context) const |
1994 | { |
1995 | const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> |
1996 | (inputZ.as_array (inputCount ? inputCount - 1 : 0)); |
1997 | return context_would_apply_lookup (c, |
1998 | inputCount, inputZ.arrayZ, |
1999 | lookupCount, lookupRecord.arrayZ, |
2000 | lookup_context); |
2001 | } |
2002 | |
2003 | bool apply (hb_ot_apply_context_t *c, |
2004 | const ContextApplyLookupContext &lookup_context) const |
2005 | { |
2006 | TRACE_APPLY (this); |
2007 | const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> |
2008 | (inputZ.as_array (inputCount ? inputCount - 1 : 0)); |
2009 | return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context)); |
2010 | } |
2011 | |
2012 | bool serialize (hb_serialize_context_t *c, |
2013 | const hb_map_t *input_mapping, /* old->new glyphid or class mapping */ |
2014 | const hb_map_t *lookup_map) const |
2015 | { |
2016 | TRACE_SERIALIZE (this); |
2017 | auto *out = c->start_embed (this); |
2018 | if (unlikely (!c->extend_min (out))) return_trace (false); |
2019 | |
2020 | out->inputCount = inputCount; |
2021 | const auto input = inputZ.as_array (inputCount - 1); |
2022 | for (const auto org : input) |
2023 | { |
2024 | HBUINT16 d; |
2025 | d = input_mapping->get (org); |
2026 | c->copy (d); |
2027 | } |
2028 | |
2029 | const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> |
      (inputZ.as_array (inputCount ? inputCount - 1 : 0));
2031 | |
2032 | unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map); |
2033 | return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW)); |
2034 | } |
2035 | |
2036 | bool subset (hb_subset_context_t *c, |
2037 | const hb_map_t *lookup_map, |
2038 | const hb_map_t *klass_map = nullptr) const |
2039 | { |
2040 | TRACE_SUBSET (this); |
2041 | if (unlikely (!inputCount)) return_trace (false); |
2042 | const auto input = inputZ.as_array (inputCount - 1); |
2043 | |
2044 | const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map; |
2045 | if (!hb_all (input, mapping)) return_trace (false); |
2046 | return_trace (serialize (c->serializer, mapping, lookup_map)); |
2047 | } |
2048 | |
2049 | public: |
2050 | bool sanitize (hb_sanitize_context_t *c) const |
2051 | { |
2052 | TRACE_SANITIZE (this); |
2053 | return_trace (c->check_struct (this) && |
2054 | c->check_range (inputZ.arrayZ, |
2055 | inputZ.item_size * (inputCount ? inputCount - 1 : 0) + |
2056 | LookupRecord::static_size * lookupCount)); |
2057 | } |
2058 | |
2059 | protected: |
2060 | HBUINT16 inputCount; /* Total number of glyphs in input |
2061 | * glyph sequence--includes the first |
2062 | * glyph */ |
2063 | HBUINT16 lookupCount; /* Number of LookupRecords */ |
2064 | UnsizedArrayOf<typename Types::HBUINT> |
2065 | inputZ; /* Array of match inputs--start with |
2066 | * second glyph */ |
2067 | /*UnsizedArrayOf<LookupRecord> |
2068 | lookupRecordX;*/ /* Array of LookupRecords--in |
2069 | * design order */ |
2070 | public: |
2071 | DEFINE_SIZE_ARRAY (4, inputZ); |
2072 | }; |
2073 | |
2074 | template <typename Types> |
2075 | struct RuleSet |
2076 | { |
2077 | using Rule = OT::Rule<Types>; |
2078 | |
2079 | bool intersects (const hb_set_t *glyphs, |
2080 | ContextClosureLookupContext &lookup_context) const |
2081 | { |
2082 | return |
2083 | + hb_iter (rule) |
2084 | | hb_map (hb_add (this)) |
2085 | | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); }) |
2086 | | hb_any |
2087 | ; |
2088 | } |
2089 | |
2090 | void closure (hb_closure_context_t *c, unsigned value, |
2091 | ContextClosureLookupContext &lookup_context) const |
2092 | { |
2093 | if (unlikely (c->lookup_limit_exceeded ())) return; |
2094 | |
2095 | return |
2096 | + hb_iter (rule) |
2097 | | hb_map (hb_add (this)) |
2098 | | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); }) |
2099 | ; |
2100 | } |
2101 | |
2102 | void closure_lookups (hb_closure_lookups_context_t *c, |
2103 | ContextClosureLookupContext &lookup_context) const |
2104 | { |
2105 | if (unlikely (c->lookup_limit_exceeded ())) return; |
2106 | + hb_iter (rule) |
2107 | | hb_map (hb_add (this)) |
2108 | | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); }) |
2109 | ; |
2110 | } |
2111 | |
2112 | void collect_glyphs (hb_collect_glyphs_context_t *c, |
2113 | ContextCollectGlyphsLookupContext &lookup_context) const |
2114 | { |
2115 | return |
2116 | + hb_iter (rule) |
2117 | | hb_map (hb_add (this)) |
2118 | | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); }) |
2119 | ; |
2120 | } |
2121 | |
2122 | bool would_apply (hb_would_apply_context_t *c, |
2123 | const ContextApplyLookupContext &lookup_context) const |
2124 | { |
2125 | return |
2126 | + hb_iter (rule) |
2127 | | hb_map (hb_add (this)) |
2128 | | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); }) |
2129 | | hb_any |
2130 | ; |
2131 | } |
2132 | |
2133 | bool apply (hb_ot_apply_context_t *c, |
2134 | const ContextApplyLookupContext &lookup_context) const |
2135 | { |
2136 | TRACE_APPLY (this); |
2137 | |
2138 | unsigned num_rules = rule.len; |
2139 | |
2140 | #ifndef HB_NO_OT_RULESETS_FAST_PATH |
2141 | if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4) |
2142 | #endif |
2143 | { |
2144 | slow: |
2145 | return_trace ( |
2146 | + hb_iter (rule) |
2147 | | hb_map (hb_add (this)) |
2148 | | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); }) |
2149 | | hb_any |
2150 | ) |
2151 | ; |
2152 | } |
2153 | |
2154 | /* This version is optimized for speed by matching the first & second |
2155 | * components of the rule here, instead of calling into the matching code. |
2156 | * |
2157 | * Replicated from LigatureSet::apply(). */ |
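
    /* unsafe_to tracks how far the buffer was examined on failed matches:
     * unsafe_to1 covers rules rejected at the first input glyph, unsafe_to2
     * those rejected at the second; unsafe_to stays (unsigned) -1 while no
     * rule has been rejected by these fast-path checks.  (Descriptive note;
     * semantics inferred from the bookkeeping below.) */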
2158 | |
2159 | hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input; |
2160 | skippy_iter.reset (c->buffer->idx); |
2161 | skippy_iter.set_match_func (match_always, nullptr); |
2162 | skippy_iter.set_glyph_data ((HBUINT16 *) nullptr); |
2163 | unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0; |
2164 | hb_glyph_info_t *first = nullptr, *second = nullptr; |
2165 | bool matched = skippy_iter.next (); |
2166 | if (likely (matched)) |
2167 | { |
2168 | first = &c->buffer->info[skippy_iter.idx]; |
      unsafe_to1 = skippy_iter.idx + 1;
2170 | |
2171 | if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])) |
2172 | { |
        /* Can't use the fast path if e.g. the next char is a default-ignorable
         * or other skippable glyph. */
2175 | goto slow; |
2176 | } |
2177 | } |
2178 | else |
2179 | { |
2180 | /* Failed to match a next glyph. Only try applying rules that have |
2181 | * no further input. */ |
2182 | return_trace ( |
2183 | + hb_iter (rule) |
2184 | | hb_map (hb_add (this)) |
2185 | | hb_filter ([&] (const Rule &_) { return _.inputCount <= 1; }) |
2186 | | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); }) |
2187 | | hb_any |
2188 | ) |
2189 | ; |
2190 | } |
2191 | matched = skippy_iter.next (); |
2192 | if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))) |
2193 | { |
2194 | second = &c->buffer->info[skippy_iter.idx]; |
2195 | unsafe_to2 = skippy_iter.idx + 1; |
2196 | } |
2197 | |
2198 | auto match_input = lookup_context.funcs.match; |
2199 | auto *input_data = lookup_context.match_data; |
2200 | for (unsigned int i = 0; i < num_rules; i++) |
2201 | { |
2202 | const auto &r = this+rule.arrayZ[i]; |
2203 | |
2204 | const auto &input = r.inputZ; |
2205 | |
2206 | if (r.inputCount <= 1 || |
2207 | (!match_input || |
2208 | match_input (*first, input.arrayZ[0], input_data))) |
2209 | { |
2210 | if (!second || |
2211 | (r.inputCount <= 2 || |
2212 | (!match_input || |
2213 | match_input (*second, input.arrayZ[1], input_data))) |
2214 | ) |
2215 | { |
2216 | if (r.apply (c, lookup_context)) |
2217 | { |
2218 | if (unsafe_to != (unsigned) -1) |
2219 | c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to); |
2220 | return_trace (true); |
2221 | } |
2222 | } |
2223 | else |
2224 | unsafe_to = unsafe_to2; |
2225 | } |
2226 | else |
2227 | { |
2228 | if (unsafe_to == (unsigned) -1) |
2229 | unsafe_to = unsafe_to1; |
2230 | } |
2231 | } |
2232 | if (likely (unsafe_to != (unsigned) -1)) |
2233 | c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to); |
2234 | |
2235 | return_trace (false); |
2236 | } |
2237 | |
2238 | bool subset (hb_subset_context_t *c, |
2239 | const hb_map_t *lookup_map, |
2240 | const hb_map_t *klass_map = nullptr) const |
2241 | { |
2242 | TRACE_SUBSET (this); |
2243 | |
2244 | auto snap = c->serializer->snapshot (); |
2245 | auto *out = c->serializer->start_embed (*this); |
2246 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
2247 | |
2248 | for (const Offset16To<Rule>& _ : rule) |
2249 | { |
2250 | if (!_) continue; |
2251 | auto o_snap = c->serializer->snapshot (); |
2252 | auto *o = out->rule.serialize_append (c->serializer); |
2253 | if (unlikely (!o)) continue; |
2254 | |
2255 | if (!o->serialize_subset (c, _, this, lookup_map, klass_map)) |
2256 | { |
2257 | out->rule.pop (); |
2258 | c->serializer->revert (o_snap); |
2259 | } |
2260 | } |
2261 | |
2262 | bool ret = bool (out->rule); |
2263 | if (!ret) c->serializer->revert (snap); |
2264 | |
2265 | return_trace (ret); |
2266 | } |
2267 | |
2268 | bool sanitize (hb_sanitize_context_t *c) const |
2269 | { |
2270 | TRACE_SANITIZE (this); |
2271 | return_trace (rule.sanitize (c, this)); |
2272 | } |
2273 | |
2274 | protected: |
2275 | Array16OfOffset16To<Rule> |
2276 | rule; /* Array of Rule tables |
2277 | * ordered by preference */ |
2278 | public: |
2279 | DEFINE_SIZE_ARRAY (2, rule); |
2280 | }; |
2281 | |
2282 | |
2283 | template <typename Types> |
2284 | struct ContextFormat1_4 |
2285 | { |
2286 | using RuleSet = OT::RuleSet<Types>; |
2287 | |
2288 | bool intersects (const hb_set_t *glyphs) const |
2289 | { |
2290 | struct ContextClosureLookupContext lookup_context = { |
2291 | {intersects_glyph, intersected_glyph}, |
2292 | ContextFormat::SimpleContext, |
2293 | nullptr |
2294 | }; |
2295 | |
2296 | return |
2297 | + hb_zip (this+coverage, ruleSet) |
2298 | | hb_filter (*glyphs, hb_first) |
2299 | | hb_map (hb_second) |
2300 | | hb_map (hb_add (this)) |
2301 | | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); }) |
2302 | | hb_any |
2303 | ; |
2304 | } |
2305 | |
2306 | bool may_have_non_1to1 () const |
2307 | { return true; } |
2308 | |
2309 | void closure (hb_closure_context_t *c) const |
2310 | { |
2311 | hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs (); |
2312 | if (unlikely (!cur_active_glyphs)) return; |
2313 | get_coverage ().intersect_set (c->previous_parent_active_glyphs (), *cur_active_glyphs); |
2314 | |
2315 | struct ContextClosureLookupContext lookup_context = { |
2316 | {intersects_glyph, intersected_glyph}, |
2317 | ContextFormat::SimpleContext, |
2318 | nullptr |
2319 | }; |
2320 | |
2321 | + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len)) |
2322 | | hb_filter ([&] (hb_codepoint_t _) { |
2323 | return c->previous_parent_active_glyphs ().has (_); |
2324 | }, hb_first) |
2325 | | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); }) |
2326 | | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); }) |
2327 | ; |
2328 | |
2329 | c->pop_cur_done_glyphs (); |
2330 | } |
2331 | |
2332 | void closure_lookups (hb_closure_lookups_context_t *c) const |
2333 | { |
2334 | struct ContextClosureLookupContext lookup_context = { |
2335 | {intersects_glyph, nullptr}, |
2336 | ContextFormat::SimpleContext, |
2337 | nullptr |
2338 | }; |
2339 | |
2340 | + hb_zip (this+coverage, ruleSet) |
2341 | | hb_filter (*c->glyphs, hb_first) |
2342 | | hb_map (hb_second) |
2343 | | hb_map (hb_add (this)) |
2344 | | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); }) |
2345 | ; |
2346 | } |
2347 | |
2348 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {} |
2349 | |
2350 | void collect_glyphs (hb_collect_glyphs_context_t *c) const |
2351 | { |
2352 | (this+coverage).collect_coverage (c->input); |
2353 | |
2354 | struct ContextCollectGlyphsLookupContext lookup_context = { |
2355 | {collect_glyph}, |
2356 | nullptr |
2357 | }; |
2358 | |
2359 | + hb_iter (ruleSet) |
2360 | | hb_map (hb_add (this)) |
2361 | | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); }) |
2362 | ; |
2363 | } |
2364 | |
2365 | bool would_apply (hb_would_apply_context_t *c) const |
2366 | { |
2367 | const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])]; |
2368 | struct ContextApplyLookupContext lookup_context = { |
2369 | {match_glyph}, |
2370 | nullptr |
2371 | }; |
2372 | return rule_set.would_apply (c, lookup_context); |
2373 | } |
2374 | |
2375 | const Coverage &get_coverage () const { return this+coverage; } |
2376 | |
2377 | bool apply (hb_ot_apply_context_t *c) const |
2378 | { |
2379 | TRACE_APPLY (this); |
2380 | unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); |
2381 | if (likely (index == NOT_COVERED)) |
2382 | return_trace (false); |
2383 | |
2384 | const RuleSet &rule_set = this+ruleSet[index]; |
2385 | struct ContextApplyLookupContext lookup_context = { |
2386 | {match_glyph}, |
2387 | nullptr |
2388 | }; |
2389 | return_trace (rule_set.apply (c, lookup_context)); |
2390 | } |
2391 | |
2392 | bool subset (hb_subset_context_t *c) const |
2393 | { |
2394 | TRACE_SUBSET (this); |
2395 | const hb_set_t &glyphset = *c->plan->glyphset_gsub (); |
2396 | const hb_map_t &glyph_map = *c->plan->glyph_map; |
2397 | |
2398 | auto *out = c->serializer->start_embed (*this); |
2399 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
2400 | out->format = format; |
2401 | |
2402 | const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups; |
2403 | hb_sorted_vector_t<hb_codepoint_t> new_coverage; |
2404 | + hb_zip (this+coverage, ruleSet) |
2405 | | hb_filter (glyphset, hb_first) |
2406 | | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second) |
2407 | | hb_map (hb_first) |
2408 | | hb_map (glyph_map) |
2409 | | hb_sink (new_coverage) |
2410 | ; |
2411 | |
2412 | out->coverage.serialize_serialize (c->serializer, new_coverage.iter ()); |
2413 | return_trace (bool (new_coverage)); |
2414 | } |
2415 | |
2416 | bool sanitize (hb_sanitize_context_t *c) const |
2417 | { |
2418 | TRACE_SANITIZE (this); |
2419 | return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this)); |
2420 | } |
2421 | |
2422 | protected: |
2423 | HBUINT16 format; /* Format identifier--format = 1 */ |
2424 | typename Types::template OffsetTo<Coverage> |
2425 | coverage; /* Offset to Coverage table--from |
2426 | * beginning of table */ |
2427 | Array16Of<typename Types::template OffsetTo<RuleSet>> |
2428 | ruleSet; /* Array of RuleSet tables |
2429 | * ordered by Coverage Index */ |
2430 | public: |
2431 | DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet); |
2432 | }; |
2433 | |
2434 | |
2435 | template <typename Types> |
2436 | struct ContextFormat2_5 |
2437 | { |
2438 | using RuleSet = OT::RuleSet<SmallTypes>; |
2439 | |
2440 | bool intersects (const hb_set_t *glyphs) const |
2441 | { |
2442 | if (!(this+coverage).intersects (glyphs)) |
2443 | return false; |
2444 | |
2445 | const ClassDef &class_def = this+classDef; |
2446 | |
2447 | hb_map_t cache; |
2448 | struct ContextClosureLookupContext lookup_context = { |
2449 | {intersects_class, nullptr}, |
2450 | ContextFormat::ClassBasedContext, |
2451 | &class_def, |
2452 | &cache |
2453 | }; |
2454 | |
2455 | hb_set_t retained_coverage_glyphs; |
2456 | (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs); |
2457 | |
2458 | hb_set_t coverage_glyph_classes; |
2459 | class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes); |
2460 | |
2461 | |
2462 | return |
2463 | + hb_iter (ruleSet) |
2464 | | hb_map (hb_add (this)) |
2465 | | hb_enumerate |
2466 | | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p) |
2467 | { return class_def.intersects_class (glyphs, p.first) && |
2468 | coverage_glyph_classes.has (p.first) && |
2469 | p.second.intersects (glyphs, lookup_context); }) |
2470 | | hb_any |
2471 | ; |
2472 | } |
2473 | |
2474 | bool may_have_non_1to1 () const |
2475 | { return true; } |
2476 | |
2477 | void closure (hb_closure_context_t *c) const |
2478 | { |
2479 | if (!(this+coverage).intersects (c->glyphs)) |
2480 | return; |
2481 | |
2482 | hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs (); |
2483 | if (unlikely (!cur_active_glyphs)) return; |
2484 | get_coverage ().intersect_set (c->previous_parent_active_glyphs (), |
2485 | *cur_active_glyphs); |
2486 | |
2487 | const ClassDef &class_def = this+classDef; |
2488 | |
2489 | hb_map_t cache; |
2490 | intersected_class_cache_t intersected_cache; |
2491 | struct ContextClosureLookupContext lookup_context = { |
2492 | {intersects_class, intersected_class_glyphs}, |
2493 | ContextFormat::ClassBasedContext, |
2494 | &class_def, |
2495 | &cache, |
2496 | &intersected_cache |
2497 | }; |
2498 | |
2499 | + hb_enumerate (ruleSet) |
2500 | | hb_filter ([&] (unsigned _) |
2501 | { return class_def.intersects_class (&c->parent_active_glyphs (), _); }, |
2502 | hb_first) |
2503 | | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<RuleSet>&> _) |
2504 | { |
2505 | const RuleSet& rule_set = this+_.second; |
2506 | rule_set.closure (c, _.first, lookup_context); |
2507 | }) |
2508 | ; |
2509 | |
2510 | c->pop_cur_done_glyphs (); |
2511 | } |
2512 | |
2513 | void closure_lookups (hb_closure_lookups_context_t *c) const |
2514 | { |
2515 | if (!(this+coverage).intersects (c->glyphs)) |
2516 | return; |
2517 | |
2518 | const ClassDef &class_def = this+classDef; |
2519 | |
2520 | hb_map_t cache; |
2521 | struct ContextClosureLookupContext lookup_context = { |
2522 | {intersects_class, nullptr}, |
2523 | ContextFormat::ClassBasedContext, |
2524 | &class_def, |
2525 | &cache |
2526 | }; |
2527 | |
2528 | + hb_iter (ruleSet) |
2529 | | hb_map (hb_add (this)) |
2530 | | hb_enumerate |
2531 | | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p) |
2532 | { return class_def.intersects_class (c->glyphs, p.first); }) |
2533 | | hb_map (hb_second) |
2534 | | hb_apply ([&] (const RuleSet & _) |
2535 | { _.closure_lookups (c, lookup_context); }); |
2536 | } |
2537 | |
2538 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {} |
2539 | |
2540 | void collect_glyphs (hb_collect_glyphs_context_t *c) const |
2541 | { |
2542 | (this+coverage).collect_coverage (c->input); |
2543 | |
2544 | const ClassDef &class_def = this+classDef; |
2545 | struct ContextCollectGlyphsLookupContext lookup_context = { |
2546 | {collect_class}, |
2547 | &class_def |
2548 | }; |
2549 | |
2550 | + hb_iter (ruleSet) |
2551 | | hb_map (hb_add (this)) |
2552 | | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); }) |
2553 | ; |
2554 | } |
2555 | |
2556 | bool would_apply (hb_would_apply_context_t *c) const |
2557 | { |
2558 | const ClassDef &class_def = this+classDef; |
2559 | unsigned int index = class_def.get_class (c->glyphs[0]); |
2560 | const RuleSet &rule_set = this+ruleSet[index]; |
2561 | struct ContextApplyLookupContext lookup_context = { |
2562 | {match_class}, |
2563 | &class_def |
2564 | }; |
2565 | return rule_set.would_apply (c, lookup_context); |
2566 | } |
2567 | |
2568 | const Coverage &get_coverage () const { return this+coverage; } |
2569 | |
2570 | unsigned cache_cost () const |
2571 | { |
2572 | unsigned c = (this+classDef).cost () * ruleSet.len; |
2573 | return c >= 4 ? c : 0; |
2574 | } |
2575 | bool cache_func (hb_ot_apply_context_t *c, bool enter) const |
2576 | { |
2577 | if (enter) |
2578 | { |
2579 | if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable)) |
2580 | return false; |
2581 | auto &info = c->buffer->info; |
2582 | unsigned count = c->buffer->len; |
2583 | for (unsigned i = 0; i < count; i++) |
2584 | info[i].syllable() = 255; |
2585 | c->new_syllables = 255; |
2586 | return true; |
2587 | } |
2588 | else |
2589 | { |
2590 | c->new_syllables = (unsigned) -1; |
2591 | HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable); |
2592 | return true; |
2593 | } |
2594 | } |
2595 | |
2596 | bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); } |
2597 | bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); } |
2598 | bool _apply (hb_ot_apply_context_t *c, bool cached) const |
2599 | { |
2600 | TRACE_APPLY (this); |
2601 | unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); |
2602 | if (likely (index == NOT_COVERED)) return_trace (false); |
2603 | |
2604 | const ClassDef &class_def = this+classDef; |
2605 | |
2606 | struct ContextApplyLookupContext lookup_context = { |
2607 | {cached ? match_class_cached : match_class}, |
2608 | &class_def |
2609 | }; |
2610 | |
2611 | if (cached && c->buffer->cur().syllable() < 255) |
2612 | index = c->buffer->cur().syllable (); |
2613 | else |
2614 | index = class_def.get_class (c->buffer->cur().codepoint); |
2615 | const RuleSet &rule_set = this+ruleSet[index]; |
2616 | return_trace (rule_set.apply (c, lookup_context)); |
2617 | } |
2618 | |
2619 | bool subset (hb_subset_context_t *c) const |
2620 | { |
2621 | TRACE_SUBSET (this); |
2622 | auto *out = c->serializer->start_embed (*this); |
2623 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
2624 | out->format = format; |
2625 | if (unlikely (!out->coverage.serialize_subset (c, coverage, this))) |
2626 | return_trace (false); |
2627 | |
2628 | hb_map_t klass_map; |
2629 | out->classDef.serialize_subset (c, classDef, this, &klass_map); |
2630 | |
2631 | const hb_set_t* glyphset = c->plan->glyphset_gsub (); |
2632 | hb_set_t retained_coverage_glyphs; |
2633 | (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs); |
2634 | |
2635 | hb_set_t coverage_glyph_classes; |
2636 | (this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes); |
2637 | |
2638 | const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups; |
2639 | bool ret = true; |
2640 | int non_zero_index = -1, index = 0; |
2641 | auto snapshot = c->serializer->snapshot(); |
2642 | for (const auto& _ : + hb_enumerate (ruleSet) |
2643 | | hb_filter (klass_map, hb_first)) |
2644 | { |
2645 | auto *o = out->ruleSet.serialize_append (c->serializer); |
2646 | if (unlikely (!o)) |
2647 | { |
2648 | ret = false; |
2649 | break; |
2650 | } |
2651 | |
2652 | if (coverage_glyph_classes.has (_.first) && |
2653 | o->serialize_subset (c, _.second, this, lookup_map, &klass_map)) { |
2654 | non_zero_index = index; |
2655 | snapshot = c->serializer->snapshot(); |
2656 | } |
2657 | |
2658 | index++; |
2659 | } |
2660 | |
2661 | if (!ret || non_zero_index == -1) return_trace (false); |
2662 | |
    /* Prune empty trailing ruleSets. */
2664 | --index; |
2665 | while (index > non_zero_index) |
2666 | { |
2667 | out->ruleSet.pop (); |
2668 | index--; |
2669 | } |
2670 | c->serializer->revert (snapshot); |
2671 | |
2672 | return_trace (bool (out->ruleSet)); |
2673 | } |
2674 | |
2675 | bool sanitize (hb_sanitize_context_t *c) const |
2676 | { |
2677 | TRACE_SANITIZE (this); |
2678 | return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this)); |
2679 | } |
2680 | |
2681 | protected: |
2682 | HBUINT16 format; /* Format identifier--format = 2 */ |
2683 | typename Types::template OffsetTo<Coverage> |
2684 | coverage; /* Offset to Coverage table--from |
2685 | * beginning of table */ |
2686 | typename Types::template OffsetTo<ClassDef> |
2687 | classDef; /* Offset to glyph ClassDef table--from |
2688 | * beginning of table */ |
2689 | Array16Of<typename Types::template OffsetTo<RuleSet>> |
2690 | ruleSet; /* Array of RuleSet tables |
2691 | * ordered by class */ |
2692 | public: |
2693 | DEFINE_SIZE_ARRAY (4 + 2 * Types::size, ruleSet); |
2694 | }; |
2695 | |
2696 | |
2697 | struct ContextFormat3 |
2698 | { |
2699 | using RuleSet = OT::RuleSet<SmallTypes>; |
2700 | |
2701 | bool intersects (const hb_set_t *glyphs) const |
2702 | { |
2703 | if (!(this+coverageZ[0]).intersects (glyphs)) |
2704 | return false; |
2705 | |
2706 | struct ContextClosureLookupContext lookup_context = { |
2707 | {intersects_coverage, nullptr}, |
2708 | ContextFormat::CoverageBasedContext, |
2709 | this |
2710 | }; |
2711 | return context_intersects (glyphs, |
2712 | glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), |
2713 | lookup_context); |
2714 | } |
2715 | |
2716 | bool may_have_non_1to1 () const |
2717 | { return true; } |
2718 | |
2719 | void closure (hb_closure_context_t *c) const |
2720 | { |
2721 | if (!(this+coverageZ[0]).intersects (c->glyphs)) |
2722 | return; |
2723 | |
2724 | hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs (); |
2725 | if (unlikely (!cur_active_glyphs)) return; |
2726 | get_coverage ().intersect_set (c->previous_parent_active_glyphs (), |
2727 | *cur_active_glyphs); |
2728 | |
2729 | const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount)); |
2730 | struct ContextClosureLookupContext lookup_context = { |
2731 | {intersects_coverage, intersected_coverage_glyphs}, |
2732 | ContextFormat::CoverageBasedContext, |
2733 | this |
2734 | }; |
2735 | context_closure_lookup (c, |
2736 | glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), |
2737 | lookupCount, lookupRecord, |
2738 | 0, lookup_context); |
2739 | |
2740 | c->pop_cur_done_glyphs (); |
2741 | } |
2742 | |
2743 | void closure_lookups (hb_closure_lookups_context_t *c) const |
2744 | { |
2745 | if (!intersects (c->glyphs)) |
2746 | return; |
2747 | const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount)); |
2748 | recurse_lookups (c, lookupCount, lookupRecord); |
2749 | } |
2750 | |
2751 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {} |
2752 | |
2753 | void collect_glyphs (hb_collect_glyphs_context_t *c) const |
2754 | { |
2755 | (this+coverageZ[0]).collect_coverage (c->input); |
2756 | |
2757 | const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount)); |
2758 | struct ContextCollectGlyphsLookupContext lookup_context = { |
2759 | {collect_coverage}, |
2760 | this |
2761 | }; |
2762 | |
2763 | context_collect_glyphs_lookup (c, |
2764 | glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), |
2765 | lookupCount, lookupRecord, |
2766 | lookup_context); |
2767 | } |
2768 | |
2769 | bool would_apply (hb_would_apply_context_t *c) const |
2770 | { |
2771 | const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount)); |
2772 | struct ContextApplyLookupContext lookup_context = { |
2773 | {match_coverage}, |
2774 | this |
2775 | }; |
2776 | return context_would_apply_lookup (c, |
2777 | glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), |
2778 | lookupCount, lookupRecord, |
2779 | lookup_context); |
2780 | } |
2781 | |
2782 | const Coverage &get_coverage () const { return this+coverageZ[0]; } |
2783 | |
2784 | bool apply (hb_ot_apply_context_t *c) const |
2785 | { |
2786 | TRACE_APPLY (this); |
2787 | unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint); |
2788 | if (likely (index == NOT_COVERED)) return_trace (false); |
2789 | |
2790 | const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount)); |
2791 | struct ContextApplyLookupContext lookup_context = { |
2792 | {match_coverage}, |
2793 | this |
2794 | }; |
2795 | return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context)); |
2796 | } |
2797 | |
2798 | bool subset (hb_subset_context_t *c) const |
2799 | { |
2800 | TRACE_SUBSET (this); |
2801 | auto *out = c->serializer->start_embed (this); |
2802 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
2803 | |
2804 | out->format = format; |
2805 | out->glyphCount = glyphCount; |
2806 | |
2807 | auto coverages = coverageZ.as_array (glyphCount); |
2808 | |
2809 | for (const Offset16To<Coverage>& offset : coverages) |
2810 | { |
      /* TODO(subset) This looks like it should not be necessary to write it this way. */
2812 | auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size); |
2813 | if (unlikely (!o)) return_trace (false); |
2814 | if (!o->serialize_subset (c, offset, this)) return_trace (false); |
2815 | } |
2816 | |
2817 | const auto& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount)); |
2818 | const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups; |
2819 | |
2820 | |
2821 | unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map); |
2822 | return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW)); |
2823 | } |
2824 | |
2825 | bool sanitize (hb_sanitize_context_t *c) const |
2826 | { |
2827 | TRACE_SANITIZE (this); |
2828 | if (unlikely (!c->check_struct (this))) return_trace (false); |
2829 | unsigned int count = glyphCount; |
2830 | if (unlikely (!count)) return_trace (false); /* We want to access coverageZ[0] freely. */ |
2831 | if (unlikely (!c->check_array (coverageZ.arrayZ, count))) return_trace (false); |
2832 | for (unsigned int i = 0; i < count; i++) |
2833 | if (unlikely (!coverageZ[i].sanitize (c, this))) return_trace (false); |
2834 | const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount)); |
2835 | return_trace (likely (c->check_array (lookupRecord, lookupCount))); |
2836 | } |
2837 | |
2838 | protected: |
2839 | HBUINT16 format; /* Format identifier--format = 3 */ |
2840 | HBUINT16 glyphCount; /* Number of glyphs in the input glyph |
2841 | * sequence */ |
2842 | HBUINT16 lookupCount; /* Number of LookupRecords */ |
2843 | UnsizedArrayOf<Offset16To<Coverage>> |
2844 | coverageZ; /* Array of offsets to Coverage |
2845 | * table in glyph sequence order */ |
2846 | /*UnsizedArrayOf<LookupRecord> |
2847 | lookupRecordX;*/ /* Array of LookupRecords--in |
2848 | * design order */ |
2849 | public: |
2850 | DEFINE_SIZE_ARRAY (6, coverageZ); |
2851 | }; |
2852 | |
2853 | struct Context |
2854 | { |
2855 | template <typename context_t, typename ...Ts> |
2856 | typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const |
2857 | { |
2858 | if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value (); |
2859 | TRACE_DISPATCH (this, u.format); |
2860 | switch (u.format) { |
2861 | case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...)); |
2862 | case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...)); |
2863 | case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...)); |
2864 | #ifndef HB_NO_BEYOND_64K |
2865 | case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...)); |
2866 | case 5: return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...)); |
2867 | #endif |
2868 | default:return_trace (c->default_return_value ()); |
2869 | } |
2870 | } |
2871 | |
2872 | protected: |
2873 | union { |
2874 | HBUINT16 format; /* Format identifier */ |
2875 | ContextFormat1_4<SmallTypes> format1; |
2876 | ContextFormat2_5<SmallTypes> format2; |
2877 | ContextFormat3 format3; |
2878 | #ifndef HB_NO_BEYOND_64K |
2879 | ContextFormat1_4<MediumTypes> format4; |
2880 | ContextFormat2_5<MediumTypes> format5; |
2881 | #endif |
2882 | } u; |
2883 | }; |
2884 | |
2885 | |
2886 | /* Chaining Contextual lookups */ |
2887 | |
2888 | struct ChainContextClosureLookupContext |
2889 | { |
2890 | ContextClosureFuncs funcs; |
2891 | ContextFormat context_format; |
2892 | const void *intersects_data[3]; |
2893 | void *intersects_cache[3]; |
2894 | void *intersected_glyphs_cache; |
2895 | }; |
2896 | |
2897 | struct ChainContextCollectGlyphsLookupContext |
2898 | { |
2899 | ContextCollectGlyphsFuncs funcs; |
2900 | const void *collect_data[3]; |
2901 | }; |
2902 | |
2903 | struct ChainContextApplyLookupContext |
2904 | { |
2905 | ChainContextApplyFuncs funcs; |
2906 | const void *match_data[3]; |
2907 | }; |
2908 | |
2909 | template <typename HBUINT> |
2910 | static inline bool chain_context_intersects (const hb_set_t *glyphs, |
2911 | unsigned int backtrackCount, |
2912 | const HBUINT backtrack[], |
2913 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
2914 | const HBUINT input[], /* Array of input values--start with second glyph */ |
2915 | unsigned int lookaheadCount, |
2916 | const HBUINT lookahead[], |
2917 | ChainContextClosureLookupContext &lookup_context) |
2918 | { |
2919 | return array_is_subset_of (glyphs, |
2920 | backtrackCount, backtrack, |
2921 | lookup_context.funcs.intersects, |
2922 | lookup_context.intersects_data[0], |
2923 | lookup_context.intersects_cache[0]) |
2924 | && array_is_subset_of (glyphs, |
2925 | inputCount ? inputCount - 1 : 0, input, |
2926 | lookup_context.funcs.intersects, |
2927 | lookup_context.intersects_data[1], |
2928 | lookup_context.intersects_cache[1]) |
2929 | && array_is_subset_of (glyphs, |
2930 | lookaheadCount, lookahead, |
2931 | lookup_context.funcs.intersects, |
2932 | lookup_context.intersects_data[2], |
2933 | lookup_context.intersects_cache[2]); |
2934 | } |
2935 | |
2936 | template <typename HBUINT> |
2937 | static inline void chain_context_closure_lookup (hb_closure_context_t *c, |
2938 | unsigned int backtrackCount, |
2939 | const HBUINT backtrack[], |
2940 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
2941 | const HBUINT input[], /* Array of input values--start with second glyph */ |
2942 | unsigned int lookaheadCount, |
2943 | const HBUINT lookahead[], |
2944 | unsigned int lookupCount, |
2945 | const LookupRecord lookupRecord[], |
2946 | unsigned value, |
2947 | ChainContextClosureLookupContext &lookup_context) |
2948 | { |
2949 | if (chain_context_intersects (c->glyphs, |
2950 | backtrackCount, backtrack, |
2951 | inputCount, input, |
2952 | lookaheadCount, lookahead, |
2953 | lookup_context)) |
2954 | context_closure_recurse_lookups (c, |
2955 | inputCount, input, |
2956 | lookupCount, lookupRecord, |
2957 | value, |
2958 | lookup_context.context_format, |
2959 | lookup_context.intersects_data[1], |
2960 | lookup_context.funcs.intersected_glyphs, |
2961 | lookup_context.intersected_glyphs_cache); |
2962 | } |
2963 | |
2964 | template <typename HBUINT> |
2965 | static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c, |
2966 | unsigned int backtrackCount, |
2967 | const HBUINT backtrack[], |
2968 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
2969 | const HBUINT input[], /* Array of input values--start with second glyph */ |
2970 | unsigned int lookaheadCount, |
2971 | const HBUINT lookahead[], |
2972 | unsigned int lookupCount, |
2973 | const LookupRecord lookupRecord[], |
2974 | ChainContextCollectGlyphsLookupContext &lookup_context) |
2975 | { |
2976 | collect_array (c, c->before, |
2977 | backtrackCount, backtrack, |
2978 | lookup_context.funcs.collect, lookup_context.collect_data[0]); |
2979 | collect_array (c, c->input, |
2980 | inputCount ? inputCount - 1 : 0, input, |
2981 | lookup_context.funcs.collect, lookup_context.collect_data[1]); |
2982 | collect_array (c, c->after, |
2983 | lookaheadCount, lookahead, |
2984 | lookup_context.funcs.collect, lookup_context.collect_data[2]); |
2985 | recurse_lookups (c, |
2986 | lookupCount, lookupRecord); |
2987 | } |
2988 | |
2989 | template <typename HBUINT> |
2990 | static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c, |
2991 | unsigned int backtrackCount, |
2992 | const HBUINT backtrack[] HB_UNUSED, |
2993 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
2994 | const HBUINT input[], /* Array of input values--start with second glyph */ |
2995 | unsigned int lookaheadCount, |
2996 | const HBUINT lookahead[] HB_UNUSED, |
2997 | unsigned int lookupCount HB_UNUSED, |
2998 | const LookupRecord lookupRecord[] HB_UNUSED, |
2999 | const ChainContextApplyLookupContext &lookup_context) |
3000 | { |
3001 | return (c->zero_context ? !backtrackCount && !lookaheadCount : true) |
3002 | && would_match_input (c, |
3003 | inputCount, input, |
3004 | lookup_context.funcs.match[1], lookup_context.match_data[1]); |
3005 | } |
3006 | |
3007 | template <typename HBUINT> |
3008 | HB_ALWAYS_INLINE |
3009 | static bool chain_context_apply_lookup (hb_ot_apply_context_t *c, |
3010 | unsigned int backtrackCount, |
3011 | const HBUINT backtrack[], |
3012 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
3013 | const HBUINT input[], /* Array of input values--start with second glyph */ |
3014 | unsigned int lookaheadCount, |
3015 | const HBUINT lookahead[], |
3016 | unsigned int lookupCount, |
3017 | const LookupRecord lookupRecord[], |
3018 | const ChainContextApplyLookupContext &lookup_context) |
3019 | { |
3020 | unsigned end_index = c->buffer->idx; |
3021 | unsigned match_end = 0; |
3022 | unsigned match_positions[HB_MAX_CONTEXT_LENGTH]; |
3023 | if (!(match_input (c, |
3024 | inputCount, input, |
3025 | lookup_context.funcs.match[1], lookup_context.match_data[1], |
3026 | &match_end, match_positions) && (end_index = match_end) |
3027 | && match_lookahead (c, |
3028 | lookaheadCount, lookahead, |
3029 | lookup_context.funcs.match[2], lookup_context.match_data[2], |
3030 | match_end, &end_index))) |
3031 | { |
3032 | c->buffer->unsafe_to_concat (c->buffer->idx, end_index); |
3033 | return false; |
3034 | } |
3035 | |
3036 | unsigned start_index = c->buffer->out_len; |
3037 | if (!match_backtrack (c, |
3038 | backtrackCount, backtrack, |
3039 | lookup_context.funcs.match[0], lookup_context.match_data[0], |
3040 | &start_index)) |
3041 | { |
3042 | c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index); |
3043 | return false; |
3044 | } |
3045 | |
3046 | c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index); |
3047 | apply_lookup (c, |
3048 | inputCount, match_positions, |
3049 | lookupCount, lookupRecord, |
3050 | match_end); |
3051 | return true; |
3052 | } |
3053 | |
3054 | template <typename Types> |
3055 | struct ChainRule |
3056 | { |
3057 | template <typename T> |
3058 | friend struct ChainRuleSet; |
3059 | |
3060 | bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const |
3061 | { |
3062 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3063 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3064 | return chain_context_intersects (glyphs, |
3065 | backtrack.len, backtrack.arrayZ, |
3066 | input.lenP1, input.arrayZ, |
3067 | lookahead.len, lookahead.arrayZ, |
3068 | lookup_context); |
3069 | } |
3070 | |
3071 | void closure (hb_closure_context_t *c, unsigned value, |
3072 | ChainContextClosureLookupContext &lookup_context) const |
3073 | { |
3074 | if (unlikely (c->lookup_limit_exceeded ())) return; |
3075 | |
3076 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3077 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3078 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
3079 | chain_context_closure_lookup (c, |
3080 | backtrack.len, backtrack.arrayZ, |
3081 | input.lenP1, input.arrayZ, |
3082 | lookahead.len, lookahead.arrayZ, |
3083 | lookup.len, lookup.arrayZ, |
3084 | value, |
3085 | lookup_context); |
3086 | } |
3087 | |
3088 | void closure_lookups (hb_closure_lookups_context_t *c, |
3089 | ChainContextClosureLookupContext &lookup_context) const |
3090 | { |
3091 | if (unlikely (c->lookup_limit_exceeded ())) return; |
3092 | if (!intersects (c->glyphs, lookup_context)) return; |
3093 | |
3094 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3095 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3096 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
3097 | recurse_lookups (c, lookup.len, lookup.arrayZ); |
3098 | } |
3099 | |
3100 | void collect_glyphs (hb_collect_glyphs_context_t *c, |
3101 | ChainContextCollectGlyphsLookupContext &lookup_context) const |
3102 | { |
3103 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3104 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3105 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
3106 | chain_context_collect_glyphs_lookup (c, |
3107 | backtrack.len, backtrack.arrayZ, |
3108 | input.lenP1, input.arrayZ, |
3109 | lookahead.len, lookahead.arrayZ, |
3110 | lookup.len, lookup.arrayZ, |
3111 | lookup_context); |
3112 | } |
3113 | |
3114 | bool would_apply (hb_would_apply_context_t *c, |
3115 | const ChainContextApplyLookupContext &lookup_context) const |
3116 | { |
3117 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3118 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3119 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
3120 | return chain_context_would_apply_lookup (c, |
3121 | backtrack.len, backtrack.arrayZ, |
3122 | input.lenP1, input.arrayZ, |
3123 | lookahead.len, lookahead.arrayZ, lookup.len, |
3124 | lookup.arrayZ, lookup_context); |
3125 | } |
3126 | |
3127 | bool apply (hb_ot_apply_context_t *c, |
3128 | const ChainContextApplyLookupContext &lookup_context) const |
3129 | { |
3130 | TRACE_APPLY (this); |
3131 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3132 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3133 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
3134 | return_trace (chain_context_apply_lookup (c, |
3135 | backtrack.len, backtrack.arrayZ, |
3136 | input.lenP1, input.arrayZ, |
3137 | lookahead.len, lookahead.arrayZ, lookup.len, |
3138 | lookup.arrayZ, lookup_context)); |
3139 | } |
3140 | |
3141 | template<typename Iterator, |
3142 | hb_requires (hb_is_iterator (Iterator))> |
3143 | void serialize_array (hb_serialize_context_t *c, |
3144 | HBUINT16 len, |
3145 | Iterator it) const |
3146 | { |
3147 | c->copy (len); |
3148 | for (const auto g : it) |
3149 | c->copy ((HBUINT16) g); |
3150 | } |
3151 | |
3152 | bool serialize (hb_serialize_context_t *c, |
3153 | const hb_map_t *lookup_map, |
3154 | const hb_map_t *backtrack_map, |
3155 | const hb_map_t *input_map = nullptr, |
3156 | const hb_map_t *lookahead_map = nullptr) const |
3157 | { |
3158 | TRACE_SERIALIZE (this); |
3159 | |
3160 | const hb_map_t *mapping = backtrack_map; |
3161 | serialize_array (c, backtrack.len, + backtrack.iter () |
3162 | | hb_map (mapping)); |
3163 | |
3164 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3165 | if (input_map) mapping = input_map; |
3166 | serialize_array (c, input.lenP1, + input.iter () |
3167 | | hb_map (mapping)); |
3168 | |
3169 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3170 | if (lookahead_map) mapping = lookahead_map; |
3171 | serialize_array (c, lookahead.len, + lookahead.iter () |
3172 | | hb_map (mapping)); |
3173 | |
3174 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
3175 | |
3176 | HBUINT16* lookupCount = c->embed (&(lookup.len)); |
3177 | if (!lookupCount) return_trace (false); |
3178 | |
3179 | unsigned count = serialize_lookuprecord_array (c, lookup.as_array (), lookup_map); |
3180 | return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW)); |
3181 | } |
3182 | |
3183 | bool subset (hb_subset_context_t *c, |
3184 | const hb_map_t *lookup_map, |
3185 | const hb_map_t *backtrack_map = nullptr, |
3186 | const hb_map_t *input_map = nullptr, |
3187 | const hb_map_t *lookahead_map = nullptr) const |
3188 | { |
3189 | TRACE_SUBSET (this); |
3190 | |
3191 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3192 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3193 | |
3194 | if (!backtrack_map) |
3195 | { |
3196 | const hb_set_t &glyphset = *c->plan->glyphset_gsub (); |
3197 | if (!hb_all (backtrack, glyphset) || |
3198 | !hb_all (input, glyphset) || |
3199 | !hb_all (lookahead, glyphset)) |
3200 | return_trace (false); |
3201 | |
3202 | serialize (c->serializer, lookup_map, c->plan->glyph_map); |
3203 | } |
3204 | else |
3205 | { |
3206 | if (!hb_all (backtrack, backtrack_map) || |
3207 | !hb_all (input, input_map) || |
3208 | !hb_all (lookahead, lookahead_map)) |
3209 | return_trace (false); |
3210 | |
3211 | serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map); |
3212 | } |
3213 | |
3214 | return_trace (true); |
3215 | } |
3216 | |
3217 | bool sanitize (hb_sanitize_context_t *c) const |
3218 | { |
3219 | TRACE_SANITIZE (this); |
    /* Hyper-optimized sanitize because this is really hot. */
3221 | if (unlikely (!backtrack.len.sanitize (c))) return_trace (false); |
3222 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3223 | if (unlikely (!input.lenP1.sanitize (c))) return_trace (false); |
3224 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3225 | if (unlikely (!lookahead.len.sanitize (c))) return_trace (false); |
3226 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
3227 | return_trace (likely (lookup.sanitize (c))); |
3228 | } |
3229 | |
3230 | protected: |
3231 | Array16Of<typename Types::HBUINT> |
3232 | backtrack; /* Array of backtracking values |
3233 | * (to be matched before the input |
3234 | * sequence) */ |
3235 | HeadlessArray16Of<typename Types::HBUINT> |
3236 | inputX; /* Array of input values (start with |
3237 | * second glyph) */ |
3238 | Array16Of<typename Types::HBUINT> |
		lookaheadX;		/* Array of lookahead values (to be
3240 | * matched after the input sequence) */ |
3241 | Array16Of<LookupRecord> |
		lookupX;		/* Array of LookupRecords, in
					 * design order */
3244 | public: |
3245 | DEFINE_SIZE_MIN (8); |
3246 | }; |
3247 | |
3248 | template <typename Types> |
3249 | struct ChainRuleSet |
3250 | { |
3251 | using ChainRule = OT::ChainRule<Types>; |
3252 | |
3253 | bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const |
3254 | { |
3255 | return |
3256 | + hb_iter (rule) |
3257 | | hb_map (hb_add (this)) |
3258 | | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); }) |
3259 | | hb_any |
3260 | ; |
3261 | } |
3262 | void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const |
3263 | { |
3264 | if (unlikely (c->lookup_limit_exceeded ())) return; |
3265 | |
3266 | return |
3267 | + hb_iter (rule) |
3268 | | hb_map (hb_add (this)) |
3269 | | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); }) |
3270 | ; |
3271 | } |
3272 | |
3273 | void closure_lookups (hb_closure_lookups_context_t *c, |
3274 | ChainContextClosureLookupContext &lookup_context) const |
3275 | { |
3276 | if (unlikely (c->lookup_limit_exceeded ())) return; |
3277 | |
3278 | + hb_iter (rule) |
3279 | | hb_map (hb_add (this)) |
3280 | | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); }) |
3281 | ; |
3282 | } |
3283 | |
3284 | void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const |
3285 | { |
3286 | return |
3287 | + hb_iter (rule) |
3288 | | hb_map (hb_add (this)) |
3289 | | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); }) |
3290 | ; |
3291 | } |
3292 | |
3293 | bool would_apply (hb_would_apply_context_t *c, |
3294 | const ChainContextApplyLookupContext &lookup_context) const |
3295 | { |
3296 | return |
3297 | + hb_iter (rule) |
3298 | | hb_map (hb_add (this)) |
3299 | | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); }) |
3300 | | hb_any |
3301 | ; |
3302 | } |
3303 | |
3304 | bool apply (hb_ot_apply_context_t *c, |
3305 | const ChainContextApplyLookupContext &lookup_context) const |
3306 | { |
3307 | TRACE_APPLY (this); |
3308 | |
3309 | unsigned num_rules = rule.len; |
3310 | |
3311 | #ifndef HB_NO_OT_RULESETS_FAST_PATH |
3312 | if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4) |
3313 | #endif |
3314 | { |
3315 | slow: |
3316 | return_trace ( |
3317 | + hb_iter (rule) |
3318 | | hb_map (hb_add (this)) |
3319 | | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); }) |
3320 | | hb_any |
3321 | ) |
3322 | ; |
3323 | } |
3324 | |
3325 | /* This version is optimized for speed by matching the first & second |
3326 | * components of the rule here, instead of calling into the matching code. |
3327 | * |
3328 | * Replicated from LigatureSet::apply(). */ |
3329 | |
3330 | hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input; |
3331 | skippy_iter.reset (c->buffer->idx); |
3332 | skippy_iter.set_match_func (match_always, nullptr); |
3333 | skippy_iter.set_glyph_data ((HBUINT16 *) nullptr); |
3334 | unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0; |
3335 | hb_glyph_info_t *first = nullptr, *second = nullptr; |
3336 | bool matched = skippy_iter.next (); |
3337 | if (likely (matched)) |
3338 | { |
3339 | first = &c->buffer->info[skippy_iter.idx]; |
3340 | unsafe_to1 = skippy_iter.idx + 1; |
3341 | |
3342 | if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])) |
3343 | { |
	/* Can't use the fast path if e.g. the next char is a default-ignorable
	 * or other skippable. */
3346 | goto slow; |
3347 | } |
3348 | } |
3349 | else |
3350 | { |
3351 | /* Failed to match a next glyph. Only try applying rules that have |
3352 | * no further input and lookahead. */ |
3353 | return_trace ( |
3354 | + hb_iter (rule) |
3355 | | hb_map (hb_add (this)) |
3356 | | hb_filter ([&] (const ChainRule &_) |
3357 | { |
3358 | const auto &input = StructAfter<decltype (_.inputX)> (_.backtrack); |
3359 | const auto &lookahead = StructAfter<decltype (_.lookaheadX)> (input); |
3360 | return input.lenP1 <= 1 && lookahead.len == 0; |
3361 | }) |
3362 | | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); }) |
3363 | | hb_any |
3364 | ) |
3365 | ; |
3366 | } |
3367 | matched = skippy_iter.next (); |
3368 | if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))) |
3369 | { |
3370 | second = &c->buffer->info[skippy_iter.idx]; |
3371 | unsafe_to2 = skippy_iter.idx + 1; |
3372 | } |
3373 | |
3374 | auto match_input = lookup_context.funcs.match[1]; |
3375 | auto match_lookahead = lookup_context.funcs.match[2]; |
3376 | auto *input_data = lookup_context.match_data[1]; |
3377 | auto *lookahead_data = lookup_context.match_data[2]; |
3378 | for (unsigned int i = 0; i < num_rules; i++) |
3379 | { |
3380 | const auto &r = this+rule.arrayZ[i]; |
3381 | |
3382 | const auto &input = StructAfter<decltype (r.inputX)> (r.backtrack); |
3383 | const auto &lookahead = StructAfter<decltype (r.lookaheadX)> (input); |
3384 | |
3385 | unsigned lenP1 = hb_max ((unsigned) input.lenP1, 1u); |
3386 | if (lenP1 > 1 ? |
3387 | (!match_input || |
3388 | match_input (*first, input.arrayZ[0], input_data)) |
3389 | : |
3390 | (!lookahead.len || !match_lookahead || |
3391 | match_lookahead (*first, lookahead.arrayZ[0], lookahead_data))) |
3392 | { |
3393 | if (!second || |
3394 | (lenP1 > 2 ? |
3395 | (!match_input || |
3396 | match_input (*second, input.arrayZ[1], input_data)) |
3397 | : |
3398 | (lookahead.len <= 2 - lenP1 || !match_lookahead || |
3399 | match_lookahead (*second, lookahead.arrayZ[2 - lenP1], lookahead_data)))) |
3400 | { |
3401 | if (r.apply (c, lookup_context)) |
3402 | { |
3403 | if (unsafe_to != (unsigned) -1) |
3404 | c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to); |
3405 | return_trace (true); |
3406 | } |
3407 | } |
3408 | else |
3409 | unsafe_to = unsafe_to2; |
3410 | } |
3411 | else |
3412 | { |
3413 | if (unsafe_to == (unsigned) -1) |
3414 | unsafe_to = unsafe_to1; |
3415 | } |
3416 | } |
3417 | if (likely (unsafe_to != (unsigned) -1)) |
3418 | c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to); |
3419 | |
3420 | return_trace (false); |
3421 | } |
3422 | |
3423 | bool subset (hb_subset_context_t *c, |
3424 | const hb_map_t *lookup_map, |
3425 | const hb_map_t *backtrack_klass_map = nullptr, |
3426 | const hb_map_t *input_klass_map = nullptr, |
3427 | const hb_map_t *lookahead_klass_map = nullptr) const |
3428 | { |
3429 | TRACE_SUBSET (this); |
3430 | |
3431 | auto snap = c->serializer->snapshot (); |
3432 | auto *out = c->serializer->start_embed (*this); |
3433 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
3434 | |
3435 | for (const Offset16To<ChainRule>& _ : rule) |
3436 | { |
3437 | if (!_) continue; |
3438 | auto o_snap = c->serializer->snapshot (); |
3439 | auto *o = out->rule.serialize_append (c->serializer); |
3440 | if (unlikely (!o)) continue; |
3441 | |
3442 | if (!o->serialize_subset (c, _, this, |
3443 | lookup_map, |
3444 | backtrack_klass_map, |
3445 | input_klass_map, |
3446 | lookahead_klass_map)) |
3447 | { |
3448 | out->rule.pop (); |
3449 | c->serializer->revert (o_snap); |
3450 | } |
3451 | } |
3452 | |
3453 | bool ret = bool (out->rule); |
3454 | if (!ret) c->serializer->revert (snap); |
3455 | |
3456 | return_trace (ret); |
3457 | } |
3458 | |
3459 | bool sanitize (hb_sanitize_context_t *c) const |
3460 | { |
3461 | TRACE_SANITIZE (this); |
3462 | return_trace (rule.sanitize (c, this)); |
3463 | } |
3464 | |
3465 | protected: |
3466 | Array16OfOffset16To<ChainRule> |
3467 | rule; /* Array of ChainRule tables |
3468 | * ordered by preference */ |
3469 | public: |
3470 | DEFINE_SIZE_ARRAY (2, rule); |
3471 | }; |
3472 | |
3473 | template <typename Types> |
3474 | struct ChainContextFormat1_4 |
3475 | { |
3476 | using ChainRuleSet = OT::ChainRuleSet<Types>; |
3477 | |
3478 | bool intersects (const hb_set_t *glyphs) const |
3479 | { |
3480 | struct ChainContextClosureLookupContext lookup_context = { |
3481 | {intersects_glyph, intersected_glyph}, |
3482 | ContextFormat::SimpleContext, |
3483 | {nullptr, nullptr, nullptr} |
3484 | }; |
3485 | |
3486 | return |
3487 | + hb_zip (this+coverage, ruleSet) |
3488 | | hb_filter (*glyphs, hb_first) |
3489 | | hb_map (hb_second) |
3490 | | hb_map (hb_add (this)) |
3491 | | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); }) |
3492 | | hb_any |
3493 | ; |
3494 | } |
3495 | |
3496 | bool may_have_non_1to1 () const |
3497 | { return true; } |
3498 | |
3499 | void closure (hb_closure_context_t *c) const |
3500 | { |
3501 | hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs (); |
3502 | if (unlikely (!cur_active_glyphs)) return; |
3503 | get_coverage ().intersect_set (c->previous_parent_active_glyphs (), |
3504 | *cur_active_glyphs); |
3505 | |
3506 | struct ChainContextClosureLookupContext lookup_context = { |
3507 | {intersects_glyph, intersected_glyph}, |
3508 | ContextFormat::SimpleContext, |
3509 | {nullptr, nullptr, nullptr} |
3510 | }; |
3511 | |
3512 | + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len)) |
3513 | | hb_filter ([&] (hb_codepoint_t _) { |
3514 | return c->previous_parent_active_glyphs ().has (_); |
3515 | }, hb_first) |
3516 | | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); }) |
3517 | | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); }) |
3518 | ; |
3519 | |
3520 | c->pop_cur_done_glyphs (); |
3521 | } |
3522 | |
3523 | void closure_lookups (hb_closure_lookups_context_t *c) const |
3524 | { |
3525 | struct ChainContextClosureLookupContext lookup_context = { |
3526 | {intersects_glyph, nullptr}, |
3527 | ContextFormat::SimpleContext, |
3528 | {nullptr, nullptr, nullptr} |
3529 | }; |
3530 | |
3531 | + hb_zip (this+coverage, ruleSet) |
3532 | | hb_filter (*c->glyphs, hb_first) |
3533 | | hb_map (hb_second) |
3534 | | hb_map (hb_add (this)) |
3535 | | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); }) |
3536 | ; |
3537 | } |
3538 | |
3539 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {} |
3540 | |
3541 | void collect_glyphs (hb_collect_glyphs_context_t *c) const |
3542 | { |
3543 | (this+coverage).collect_coverage (c->input); |
3544 | |
3545 | struct ChainContextCollectGlyphsLookupContext lookup_context = { |
3546 | {collect_glyph}, |
3547 | {nullptr, nullptr, nullptr} |
3548 | }; |
3549 | |
3550 | + hb_iter (ruleSet) |
3551 | | hb_map (hb_add (this)) |
3552 | | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); }) |
3553 | ; |
3554 | } |
3555 | |
3556 | bool would_apply (hb_would_apply_context_t *c) const |
3557 | { |
3558 | const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])]; |
3559 | struct ChainContextApplyLookupContext lookup_context = { |
3560 | {{match_glyph, match_glyph, match_glyph}}, |
3561 | {nullptr, nullptr, nullptr} |
3562 | }; |
3563 | return rule_set.would_apply (c, lookup_context); |
3564 | } |
3565 | |
3566 | const Coverage &get_coverage () const { return this+coverage; } |
3567 | |
3568 | bool apply (hb_ot_apply_context_t *c) const |
3569 | { |
3570 | TRACE_APPLY (this); |
3571 | unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); |
3572 | if (likely (index == NOT_COVERED)) return_trace (false); |
3573 | |
3574 | const ChainRuleSet &rule_set = this+ruleSet[index]; |
3575 | struct ChainContextApplyLookupContext lookup_context = { |
3576 | {{match_glyph, match_glyph, match_glyph}}, |
3577 | {nullptr, nullptr, nullptr} |
3578 | }; |
3579 | return_trace (rule_set.apply (c, lookup_context)); |
3580 | } |
3581 | |
3582 | bool subset (hb_subset_context_t *c) const |
3583 | { |
3584 | TRACE_SUBSET (this); |
3585 | const hb_set_t &glyphset = *c->plan->glyphset_gsub (); |
3586 | const hb_map_t &glyph_map = *c->plan->glyph_map; |
3587 | |
3588 | auto *out = c->serializer->start_embed (*this); |
3589 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
3590 | out->format = format; |
3591 | |
3592 | const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups; |
3593 | hb_sorted_vector_t<hb_codepoint_t> new_coverage; |
3594 | + hb_zip (this+coverage, ruleSet) |
3595 | | hb_filter (glyphset, hb_first) |
3596 | | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second) |
3597 | | hb_map (hb_first) |
3598 | | hb_map (glyph_map) |
3599 | | hb_sink (new_coverage) |
3600 | ; |
3601 | |
3602 | out->coverage.serialize_serialize (c->serializer, new_coverage.iter ()); |
3603 | return_trace (bool (new_coverage)); |
3604 | } |
3605 | |
3606 | bool sanitize (hb_sanitize_context_t *c) const |
3607 | { |
3608 | TRACE_SANITIZE (this); |
3609 | return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this)); |
3610 | } |
3611 | |
3612 | protected: |
3613 | HBUINT16 format; /* Format identifier--format = 1 */ |
3614 | typename Types::template OffsetTo<Coverage> |
3615 | coverage; /* Offset to Coverage table--from |
3616 | * beginning of table */ |
3617 | Array16Of<typename Types::template OffsetTo<ChainRuleSet>> |
3618 | ruleSet; /* Array of ChainRuleSet tables |
3619 | * ordered by Coverage Index */ |
3620 | public: |
3621 | DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet); |
3622 | }; |
3623 | |
3624 | template <typename Types> |
3625 | struct ChainContextFormat2_5 |
3626 | { |
3627 | using ChainRuleSet = OT::ChainRuleSet<SmallTypes>; |
3628 | |
3629 | bool intersects (const hb_set_t *glyphs) const |
3630 | { |
3631 | if (!(this+coverage).intersects (glyphs)) |
3632 | return false; |
3633 | |
3634 | const ClassDef &backtrack_class_def = this+backtrackClassDef; |
3635 | const ClassDef &input_class_def = this+inputClassDef; |
3636 | const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
3637 | |
3638 | hb_map_t caches[3] = {}; |
3639 | struct ChainContextClosureLookupContext lookup_context = { |
3640 | {intersects_class, nullptr}, |
3641 | ContextFormat::ClassBasedContext, |
3642 | {&backtrack_class_def, |
3643 | &input_class_def, |
3644 | &lookahead_class_def}, |
3645 | {&caches[0], &caches[1], &caches[2]} |
3646 | }; |
3647 | |
3648 | hb_set_t retained_coverage_glyphs; |
3649 | (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs); |
3650 | |
3651 | hb_set_t coverage_glyph_classes; |
3652 | input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes); |
3653 | |
3654 | return |
3655 | + hb_iter (ruleSet) |
3656 | | hb_map (hb_add (this)) |
3657 | | hb_enumerate |
3658 | | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p) |
3659 | { return input_class_def.intersects_class (glyphs, p.first) && |
3660 | coverage_glyph_classes.has (p.first) && |
3661 | p.second.intersects (glyphs, lookup_context); }) |
3662 | | hb_any |
3663 | ; |
3664 | } |
3665 | |
3666 | bool may_have_non_1to1 () const |
3667 | { return true; } |
3668 | |
3669 | void closure (hb_closure_context_t *c) const |
3670 | { |
3671 | if (!(this+coverage).intersects (c->glyphs)) |
3672 | return; |
3673 | |
3674 | hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs (); |
3675 | if (unlikely (!cur_active_glyphs)) return; |
3676 | get_coverage ().intersect_set (c->previous_parent_active_glyphs (), |
3677 | *cur_active_glyphs); |
3678 | |
3679 | const ClassDef &backtrack_class_def = this+backtrackClassDef; |
3680 | const ClassDef &input_class_def = this+inputClassDef; |
3681 | const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
3682 | |
3683 | hb_map_t caches[3] = {}; |
3684 | intersected_class_cache_t intersected_cache; |
3685 | struct ChainContextClosureLookupContext lookup_context = { |
3686 | {intersects_class, intersected_class_glyphs}, |
3687 | ContextFormat::ClassBasedContext, |
3688 | {&backtrack_class_def, |
3689 | &input_class_def, |
3690 | &lookahead_class_def}, |
3691 | {&caches[0], &caches[1], &caches[2]}, |
3692 | &intersected_cache |
3693 | }; |
3694 | |
3695 | + hb_enumerate (ruleSet) |
3696 | | hb_filter ([&] (unsigned _) |
3697 | { return input_class_def.intersects_class (&c->parent_active_glyphs (), _); }, |
3698 | hb_first) |
3699 | | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<ChainRuleSet>&> _) |
3700 | { |
3701 | const ChainRuleSet& chainrule_set = this+_.second; |
3702 | chainrule_set.closure (c, _.first, lookup_context); |
3703 | }) |
3704 | ; |
3705 | |
3706 | c->pop_cur_done_glyphs (); |
3707 | } |
3708 | |
3709 | void closure_lookups (hb_closure_lookups_context_t *c) const |
3710 | { |
3711 | if (!(this+coverage).intersects (c->glyphs)) |
3712 | return; |
3713 | |
3714 | const ClassDef &backtrack_class_def = this+backtrackClassDef; |
3715 | const ClassDef &input_class_def = this+inputClassDef; |
3716 | const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
3717 | |
3718 | hb_map_t caches[3] = {}; |
3719 | struct ChainContextClosureLookupContext lookup_context = { |
3720 | {intersects_class, nullptr}, |
3721 | ContextFormat::ClassBasedContext, |
3722 | {&backtrack_class_def, |
3723 | &input_class_def, |
3724 | &lookahead_class_def}, |
3725 | {&caches[0], &caches[1], &caches[2]} |
3726 | }; |
3727 | |
3728 | + hb_iter (ruleSet) |
3729 | | hb_map (hb_add (this)) |
3730 | | hb_enumerate |
3731 | | hb_filter([&] (unsigned klass) |
3732 | { return input_class_def.intersects_class (c->glyphs, klass); }, hb_first) |
3733 | | hb_map (hb_second) |
3734 | | hb_apply ([&] (const ChainRuleSet &_) |
3735 | { _.closure_lookups (c, lookup_context); }) |
3736 | ; |
3737 | } |
3738 | |
3739 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {} |
3740 | |
3741 | void collect_glyphs (hb_collect_glyphs_context_t *c) const |
3742 | { |
3743 | (this+coverage).collect_coverage (c->input); |
3744 | |
3745 | const ClassDef &backtrack_class_def = this+backtrackClassDef; |
3746 | const ClassDef &input_class_def = this+inputClassDef; |
3747 | const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
3748 | |
3749 | struct ChainContextCollectGlyphsLookupContext lookup_context = { |
3750 | {collect_class}, |
3751 | {&backtrack_class_def, |
3752 | &input_class_def, |
3753 | &lookahead_class_def} |
3754 | }; |
3755 | |
3756 | + hb_iter (ruleSet) |
3757 | | hb_map (hb_add (this)) |
3758 | | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); }) |
3759 | ; |
3760 | } |
3761 | |
3762 | bool would_apply (hb_would_apply_context_t *c) const |
3763 | { |
3764 | const ClassDef &backtrack_class_def = this+backtrackClassDef; |
3765 | const ClassDef &input_class_def = this+inputClassDef; |
3766 | const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
3767 | |
3768 | unsigned int index = input_class_def.get_class (c->glyphs[0]); |
3769 | const ChainRuleSet &rule_set = this+ruleSet[index]; |
3770 | struct ChainContextApplyLookupContext lookup_context = { |
3771 | {{match_class, match_class, match_class}}, |
3772 | {&backtrack_class_def, |
3773 | &input_class_def, |
3774 | &lookahead_class_def} |
3775 | }; |
3776 | return rule_set.would_apply (c, lookup_context); |
3777 | } |
3778 | |
3779 | const Coverage &get_coverage () const { return this+coverage; } |
3780 | |
3781 | unsigned cache_cost () const |
3782 | { |
3783 | unsigned c = (this+lookaheadClassDef).cost () * ruleSet.len; |
3784 | return c >= 4 ? c : 0; |
3785 | } |
3786 | bool cache_func (hb_ot_apply_context_t *c, bool enter) const |
3787 | { |
3788 | if (enter) |
3789 | { |
3790 | if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable)) |
3791 | return false; |
3792 | auto &info = c->buffer->info; |
3793 | unsigned count = c->buffer->len; |
3794 | for (unsigned i = 0; i < count; i++) |
3795 | info[i].syllable() = 255; |
3796 | c->new_syllables = 255; |
3797 | return true; |
3798 | } |
3799 | else |
3800 | { |
3801 | c->new_syllables = (unsigned) -1; |
3802 | HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable); |
3803 | return true; |
3804 | } |
3805 | } |
3806 | |
3807 | bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); } |
3808 | bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); } |
3809 | bool _apply (hb_ot_apply_context_t *c, bool cached) const |
3810 | { |
3811 | TRACE_APPLY (this); |
3812 | unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); |
3813 | if (likely (index == NOT_COVERED)) return_trace (false); |
3814 | |
3815 | const ClassDef &backtrack_class_def = this+backtrackClassDef; |
3816 | const ClassDef &input_class_def = this+inputClassDef; |
3817 | const ClassDef &lookahead_class_def = this+lookaheadClassDef; |
3818 | |
    /* match_class_cached1 is slightly faster. Use it for lookahead,
     * which is typically longer. */
3821 | struct ChainContextApplyLookupContext lookup_context = { |
3822 | {{cached && &backtrack_class_def == &lookahead_class_def ? match_class_cached1 : match_class, |
3823 | cached ? match_class_cached2 : match_class, |
3824 | cached ? match_class_cached1 : match_class}}, |
3825 | {&backtrack_class_def, |
3826 | &input_class_def, |
3827 | &lookahead_class_def} |
3828 | }; |
3829 | |
3830 | // Note: Corresponds to match_class_cached2 |
3831 | if (cached && ((c->buffer->cur().syllable() & 0xF0) >> 4) < 15) |
3832 | index = (c->buffer->cur().syllable () & 0xF0) >> 4; |
3833 | else |
3834 | index = input_class_def.get_class (c->buffer->cur().codepoint); |
3835 | const ChainRuleSet &rule_set = this+ruleSet[index]; |
3836 | return_trace (rule_set.apply (c, lookup_context)); |
3837 | } |
3838 | |
3839 | bool subset (hb_subset_context_t *c) const |
3840 | { |
3841 | TRACE_SUBSET (this); |
3842 | auto *out = c->serializer->start_embed (*this); |
3843 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
3844 | out->format = format; |
3845 | out->coverage.serialize_subset (c, coverage, this); |
3846 | |
3847 | hb_map_t backtrack_klass_map; |
3848 | hb_map_t input_klass_map; |
3849 | hb_map_t lookahead_klass_map; |
3850 | |
3851 | out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map); |
    // TODO: subset inputClassDef based on glyphs that survived Coverage subsetting
3853 | out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map); |
3854 | out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map); |
3855 | |
3856 | if (unlikely (!c->serializer->propagate_error (backtrack_klass_map, |
3857 | input_klass_map, |
3858 | lookahead_klass_map))) |
3859 | return_trace (false); |
3860 | |
3861 | const hb_set_t* glyphset = c->plan->glyphset_gsub (); |
3862 | hb_set_t retained_coverage_glyphs; |
3863 | (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs); |
3864 | |
3865 | hb_set_t coverage_glyph_classes; |
3866 | (this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes); |
3867 | |
3868 | int non_zero_index = -1, index = 0; |
3869 | bool ret = true; |
3870 | const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups; |
3871 | auto last_non_zero = c->serializer->snapshot (); |
3872 | for (const auto& _ : + hb_enumerate (ruleSet) |
3873 | | hb_filter (input_klass_map, hb_first)) |
3874 | { |
3875 | auto *o = out->ruleSet.serialize_append (c->serializer); |
3876 | if (unlikely (!o)) |
3877 | { |
3878 | ret = false; |
3879 | break; |
3880 | } |
3881 | if (coverage_glyph_classes.has (_.first) && |
3882 | o->serialize_subset (c, _.second, this, |
3883 | lookup_map, |
3884 | &backtrack_klass_map, |
3885 | &input_klass_map, |
3886 | &lookahead_klass_map)) |
3887 | { |
3888 | last_non_zero = c->serializer->snapshot (); |
3889 | non_zero_index = index; |
3890 | } |
3891 | |
3892 | index++; |
3893 | } |
3894 | |
3895 | if (!ret || non_zero_index == -1) return_trace (false); |
3896 | |
3897 | // prune empty trailing ruleSets |
3898 | if (index > non_zero_index) { |
3899 | c->serializer->revert (last_non_zero); |
3900 | out->ruleSet.len = non_zero_index + 1; |
3901 | } |
3902 | |
3903 | return_trace (bool (out->ruleSet)); |
3904 | } |
3905 | |
3906 | bool sanitize (hb_sanitize_context_t *c) const |
3907 | { |
3908 | TRACE_SANITIZE (this); |
3909 | return_trace (coverage.sanitize (c, this) && |
3910 | backtrackClassDef.sanitize (c, this) && |
3911 | inputClassDef.sanitize (c, this) && |
3912 | lookaheadClassDef.sanitize (c, this) && |
3913 | ruleSet.sanitize (c, this)); |
3914 | } |
3915 | |
3916 | protected: |
3917 | HBUINT16 format; /* Format identifier--format = 2 */ |
3918 | typename Types::template OffsetTo<Coverage> |
3919 | coverage; /* Offset to Coverage table--from |
3920 | * beginning of table */ |
3921 | typename Types::template OffsetTo<ClassDef> |
3922 | backtrackClassDef; /* Offset to glyph ClassDef table |
3923 | * containing backtrack sequence |
3924 | * data--from beginning of table */ |
3925 | typename Types::template OffsetTo<ClassDef> |
3926 | inputClassDef; /* Offset to glyph ClassDef |
3927 | * table containing input sequence |
3928 | * data--from beginning of table */ |
3929 | typename Types::template OffsetTo<ClassDef> |
3930 | lookaheadClassDef; /* Offset to glyph ClassDef table |
3931 | * containing lookahead sequence |
3932 | * data--from beginning of table */ |
3933 | Array16Of<typename Types::template OffsetTo<ChainRuleSet>> |
3934 | ruleSet; /* Array of ChainRuleSet tables |
3935 | * ordered by class */ |
3936 | public: |
3937 | DEFINE_SIZE_ARRAY (4 + 4 * Types::size, ruleSet); |
3938 | }; |
3939 | |
3940 | struct ChainContextFormat3 |
3941 | { |
3942 | using RuleSet = OT::RuleSet<SmallTypes>; |
3943 | |
3944 | bool intersects (const hb_set_t *glyphs) const |
3945 | { |
3946 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3947 | |
3948 | if (!(this+input[0]).intersects (glyphs)) |
3949 | return false; |
3950 | |
3951 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3952 | struct ChainContextClosureLookupContext lookup_context = { |
3953 | {intersects_coverage, nullptr}, |
3954 | ContextFormat::CoverageBasedContext, |
3955 | {this, this, this} |
3956 | }; |
3957 | return chain_context_intersects (glyphs, |
3958 | backtrack.len, (const HBUINT16 *) backtrack.arrayZ, |
3959 | input.len, (const HBUINT16 *) input.arrayZ + 1, |
3960 | lookahead.len, (const HBUINT16 *) lookahead.arrayZ, |
3961 | lookup_context); |
3962 | } |
3963 | |
3964 | bool may_have_non_1to1 () const |
3965 | { return true; } |
3966 | |
3967 | void closure (hb_closure_context_t *c) const |
3968 | { |
3969 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
3970 | |
3971 | if (!(this+input[0]).intersects (c->glyphs)) |
3972 | return; |
3973 | |
3974 | hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs (); |
3975 | if (unlikely (!cur_active_glyphs)) |
3976 | return; |
3977 | get_coverage ().intersect_set (c->previous_parent_active_glyphs (), |
3978 | *cur_active_glyphs); |
3979 | |
3980 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
3981 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
3982 | struct ChainContextClosureLookupContext lookup_context = { |
3983 | {intersects_coverage, intersected_coverage_glyphs}, |
3984 | ContextFormat::CoverageBasedContext, |
3985 | {this, this, this} |
3986 | }; |
3987 | chain_context_closure_lookup (c, |
3988 | backtrack.len, (const HBUINT16 *) backtrack.arrayZ, |
3989 | input.len, (const HBUINT16 *) input.arrayZ + 1, |
3990 | lookahead.len, (const HBUINT16 *) lookahead.arrayZ, |
3991 | lookup.len, lookup.arrayZ, |
3992 | 0, lookup_context); |
3993 | |
3994 | c->pop_cur_done_glyphs (); |
3995 | } |
3996 | |
3997 | void closure_lookups (hb_closure_lookups_context_t *c) const |
3998 | { |
3999 | if (!intersects (c->glyphs)) |
4000 | return; |
4001 | |
4002 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
4003 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
4004 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
4005 | recurse_lookups (c, lookup.len, lookup.arrayZ); |
4006 | } |
4007 | |
4008 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {} |
4009 | |
4010 | void collect_glyphs (hb_collect_glyphs_context_t *c) const |
4011 | { |
4012 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
4013 | |
4014 | (this+input[0]).collect_coverage (c->input); |
4015 | |
4016 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
4017 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
4018 | |
4019 | struct ChainContextCollectGlyphsLookupContext lookup_context = { |
4020 | {collect_coverage}, |
4021 | {this, this, this} |
4022 | }; |
4023 | chain_context_collect_glyphs_lookup (c, |
4024 | backtrack.len, (const HBUINT16 *) backtrack.arrayZ, |
4025 | input.len, (const HBUINT16 *) input.arrayZ + 1, |
4026 | lookahead.len, (const HBUINT16 *) lookahead.arrayZ, |
4027 | lookup.len, lookup.arrayZ, |
4028 | lookup_context); |
4029 | } |
4030 | |
4031 | bool would_apply (hb_would_apply_context_t *c) const |
4032 | { |
4033 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
4034 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
4035 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
4036 | struct ChainContextApplyLookupContext lookup_context = { |
4037 | {{match_coverage, match_coverage, match_coverage}}, |
4038 | {this, this, this} |
4039 | }; |
4040 | return chain_context_would_apply_lookup (c, |
4041 | backtrack.len, (const HBUINT16 *) backtrack.arrayZ, |
4042 | input.len, (const HBUINT16 *) input.arrayZ + 1, |
4043 | lookahead.len, (const HBUINT16 *) lookahead.arrayZ, |
4044 | lookup.len, lookup.arrayZ, lookup_context); |
4045 | } |
4046 | |
4047 | const Coverage &get_coverage () const |
4048 | { |
4049 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
4050 | return this+input[0]; |
4051 | } |
4052 | |
4053 | bool apply (hb_ot_apply_context_t *c) const |
4054 | { |
4055 | TRACE_APPLY (this); |
4056 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
4057 | |
4058 | unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint); |
4059 | if (likely (index == NOT_COVERED)) return_trace (false); |
4060 | |
4061 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
4062 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
4063 | struct ChainContextApplyLookupContext lookup_context = { |
4064 | {{match_coverage, match_coverage, match_coverage}}, |
4065 | {this, this, this} |
4066 | }; |
4067 | return_trace (chain_context_apply_lookup (c, |
4068 | backtrack.len, (const HBUINT16 *) backtrack.arrayZ, |
4069 | input.len, (const HBUINT16 *) input.arrayZ + 1, |
4070 | lookahead.len, (const HBUINT16 *) lookahead.arrayZ, |
4071 | lookup.len, lookup.arrayZ, lookup_context)); |
4072 | } |
4073 | |
4074 | template<typename Iterator, |
4075 | hb_requires (hb_is_iterator (Iterator))> |
4076 | bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const |
4077 | { |
4078 | TRACE_SERIALIZE (this); |
4079 | auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> (); |
4080 | |
4081 | if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size))) |
4082 | return_trace (false); |
4083 | |
4084 | for (auto& offset : it) { |
4085 | auto *o = out->serialize_append (c->serializer); |
4086 | if (unlikely (!o) || !o->serialize_subset (c, offset, base)) |
4087 | return_trace (false); |
4088 | } |
4089 | |
4090 | return_trace (true); |
4091 | } |
4092 | |
4093 | bool subset (hb_subset_context_t *c) const |
4094 | { |
4095 | TRACE_SUBSET (this); |
4096 | |
4097 | if (unlikely (!c->serializer->embed (this->format))) return_trace (false); |
4098 | |
4099 | if (!serialize_coverage_offsets (c, backtrack.iter (), this)) |
4100 | return_trace (false); |
4101 | |
4102 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
4103 | if (!serialize_coverage_offsets (c, input.iter (), this)) |
4104 | return_trace (false); |
4105 | |
4106 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
4107 | if (!serialize_coverage_offsets (c, lookahead.iter (), this)) |
4108 | return_trace (false); |
4109 | |
4110 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
4111 | const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups; |
4112 | |
4113 | HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookup.len); |
4114 | if (!lookupCount) return_trace (false); |
4115 | |
4116 | unsigned count = serialize_lookuprecord_array (c->serializer, lookup.as_array (), lookup_map); |
4117 | return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW)); |
4118 | } |
4119 | |
4120 | bool sanitize (hb_sanitize_context_t *c) const |
4121 | { |
4122 | TRACE_SANITIZE (this); |
4123 | if (unlikely (!backtrack.sanitize (c, this))) return_trace (false); |
4124 | const auto &input = StructAfter<decltype (inputX)> (backtrack); |
4125 | if (unlikely (!input.sanitize (c, this))) return_trace (false); |
4126 | if (unlikely (!input.len)) return_trace (false); /* To be consistent with Context. */ |
4127 | const auto &lookahead = StructAfter<decltype (lookaheadX)> (input); |
4128 | if (unlikely (!lookahead.sanitize (c, this))) return_trace (false); |
4129 | const auto &lookup = StructAfter<decltype (lookupX)> (lookahead); |
4130 | return_trace (likely (lookup.sanitize (c))); |
4131 | } |
4132 | |
4133 | protected: |
4134 | HBUINT16 format; /* Format identifier--format = 3 */ |
4135 | Array16OfOffset16To<Coverage> |
4136 | backtrack; /* Array of coverage tables |
4137 | * in backtracking sequence, in glyph |
4138 | * sequence order */ |
4139 | Array16OfOffset16To<Coverage> |
		inputX;			/* Array of coverage
4141 | * tables in input sequence, in glyph |
4142 | * sequence order */ |
4143 | Array16OfOffset16To<Coverage> |
4144 | lookaheadX; /* Array of coverage tables |
4145 | * in lookahead sequence, in glyph |
4146 | * sequence order */ |
4147 | Array16Of<LookupRecord> |
		lookupX;		/* Array of LookupRecords, in
					 * design order */
4150 | public: |
4151 | DEFINE_SIZE_MIN (10); |
4152 | }; |
4153 | |
4154 | struct ChainContext |
4155 | { |
4156 | template <typename context_t, typename ...Ts> |
4157 | typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const |
4158 | { |
4159 | if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value (); |
4160 | TRACE_DISPATCH (this, u.format); |
4161 | switch (u.format) { |
4162 | case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...)); |
4163 | case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...)); |
4164 | case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...)); |
4165 | #ifndef HB_NO_BEYOND_64K |
4166 | case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...)); |
4167 | case 5: return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...)); |
4168 | #endif |
4169 | default:return_trace (c->default_return_value ()); |
4170 | } |
4171 | } |
4172 | |
4173 | protected: |
4174 | union { |
4175 | HBUINT16 format; /* Format identifier */ |
4176 | ChainContextFormat1_4<SmallTypes> format1; |
4177 | ChainContextFormat2_5<SmallTypes> format2; |
4178 | ChainContextFormat3 format3; |
4179 | #ifndef HB_NO_BEYOND_64K |
4180 | ChainContextFormat1_4<MediumTypes> format4; |
4181 | ChainContextFormat2_5<MediumTypes> format5; |
4182 | #endif |
4183 | } u; |
4184 | }; |
4185 | |
4186 | |
4187 | template <typename T> |
4188 | struct ExtensionFormat1 |
4189 | { |
4190 | unsigned int get_type () const { return extensionLookupType; } |
4191 | |
4192 | template <typename X> |
4193 | const X& get_subtable () const |
4194 | { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); } |
4195 | |
4196 | template <typename context_t, typename ...Ts> |
4197 | typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const |
4198 | { |
4199 | if (unlikely (!c->may_dispatch (this, this))) return c->no_dispatch_return_value (); |
4200 | TRACE_DISPATCH (this, format); |
4201 | return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...)); |
4202 | } |
4203 | |
4204 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const |
4205 | { dispatch (c); } |
4206 | |
4207 | /* This is called from may_dispatch() above with hb_sanitize_context_t. */ |
4208 | bool sanitize (hb_sanitize_context_t *c) const |
4209 | { |
4210 | TRACE_SANITIZE (this); |
4211 | return_trace (c->check_struct (this) && |
4212 | extensionLookupType != T::SubTable::Extension); |
4213 | } |
4214 | |
4215 | bool subset (hb_subset_context_t *c) const |
4216 | { |
4217 | TRACE_SUBSET (this); |
4218 | |
4219 | auto *out = c->serializer->start_embed (this); |
4220 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
4221 | |
4222 | out->format = format; |
4223 | out->extensionLookupType = extensionLookupType; |
4224 | |
4225 | const auto& src_offset = |
4226 | reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); |
4227 | auto& dest_offset = |
4228 | reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset); |
4229 | |
4230 | return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ())); |
4231 | } |
4232 | |
4233 | protected: |
4234 | HBUINT16 format; /* Format identifier. Set to 1. */ |
4235 | HBUINT16 extensionLookupType; /* Lookup type of subtable referenced |
4236 | * by ExtensionOffset (i.e. the |
4237 | * extension subtable). */ |
  Offset32	extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type extensionLookupType. */
4240 | public: |
4241 | DEFINE_SIZE_STATIC (8); |
4242 | }; |
4243 | |
4244 | template <typename T> |
4245 | struct Extension |
4246 | { |
4247 | unsigned int get_type () const |
4248 | { |
4249 | switch (u.format) { |
4250 | case 1: return u.format1.get_type (); |
4251 | default:return 0; |
4252 | } |
4253 | } |
4254 | template <typename X> |
4255 | const X& get_subtable () const |
4256 | { |
4257 | switch (u.format) { |
4258 | case 1: return u.format1.template get_subtable<typename T::SubTable> (); |
4259 | default:return Null (typename T::SubTable); |
4260 | } |
4261 | } |
4262 | |
4263 | // Specialization of dispatch for subset. dispatch() normally just |
  // dispatches to the subtable this points to, but for subset
4265 | // we need to run subset on this subtable too. |
4266 | template <typename ...Ts> |
4267 | typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const |
4268 | { |
4269 | switch (u.format) { |
4270 | case 1: return u.format1.subset (c); |
4271 | default: return c->default_return_value (); |
4272 | } |
4273 | } |
4274 | |
4275 | template <typename context_t, typename ...Ts> |
4276 | typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const |
4277 | { |
4278 | if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value (); |
4279 | TRACE_DISPATCH (this, u.format); |
4280 | switch (u.format) { |
4281 | case 1: return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...)); |
4282 | default:return_trace (c->default_return_value ()); |
4283 | } |
4284 | } |
4285 | |
4286 | protected: |
4287 | union { |
4288 | HBUINT16 format; /* Format identifier */ |
4289 | ExtensionFormat1<T> format1; |
4290 | } u; |
4291 | }; |
4292 | |
4293 | |
4294 | /* |
4295 | * GSUB/GPOS Common |
4296 | */ |
4297 | |
4298 | struct hb_ot_layout_lookup_accelerator_t |
4299 | { |
4300 | template <typename TLookup> |
4301 | static hb_ot_layout_lookup_accelerator_t *create (const TLookup &lookup) |
4302 | { |
4303 | unsigned count = lookup.get_subtable_count (); |
4304 | |
4305 | unsigned size = sizeof (hb_ot_layout_lookup_accelerator_t) - |
4306 | HB_VAR_ARRAY * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t) + |
4307 | count * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t); |
4308 | |
4309 | /* The following is a calloc because when we are collecting subtables, |
     * some of them might be invalid and hence not collected; as a result,
4311 | * we might not fill in all the count entries of the subtables array. |
4312 | * Zeroing it allows the set digest to gatekeep it without having to |
4313 | * initialize it further. */ |
4314 | auto *thiz = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (1, size); |
4315 | if (unlikely (!thiz)) |
4316 | return nullptr; |
4317 | |
4318 | hb_accelerate_subtables_context_t c_accelerate_subtables (thiz->subtables); |
4319 | lookup.dispatch (&c_accelerate_subtables); |
4320 | |
4321 | thiz->digest.init (); |
4322 | for (auto& subtable : hb_iter (thiz->subtables, count)) |
4323 | thiz->digest.add (subtable.digest); |
4324 | |
4325 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
4326 | thiz->cache_user_idx = c_accelerate_subtables.cache_user_idx; |
4327 | for (unsigned i = 0; i < count; i++) |
4328 | if (i != thiz->cache_user_idx) |
4329 | thiz->subtables[i].apply_cached_func = thiz->subtables[i].apply_func; |
4330 | #endif |
4331 | |
4332 | return thiz; |
4333 | } |
4334 | |
4335 | bool may_have (hb_codepoint_t g) const |
4336 | { return digest.may_have (g); } |
4337 | |
4338 | #ifndef HB_OPTIMIZE_SIZE |
4339 | HB_ALWAYS_INLINE |
4340 | #endif |
4341 | bool apply (hb_ot_apply_context_t *c, unsigned subtables_count, bool use_cache) const |
4342 | { |
4343 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
4344 | if (use_cache) |
4345 | { |
4346 | return |
4347 | + hb_iter (hb_iter (subtables, subtables_count)) |
4348 | | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply_cached (c); }) |
4349 | | hb_any |
4350 | ; |
4351 | } |
4352 | else |
4353 | #endif |
4354 | { |
4355 | return |
4356 | + hb_iter (hb_iter (subtables, subtables_count)) |
4357 | | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply (c); }) |
4358 | | hb_any |
4359 | ; |
4360 | } |
4361 | return false; |
4362 | } |
4363 | |
4364 | bool cache_enter (hb_ot_apply_context_t *c) const |
4365 | { |
4366 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
4367 | return cache_user_idx != (unsigned) -1 && |
4368 | subtables[cache_user_idx].cache_enter (c); |
4369 | #else |
4370 | return false; |
4371 | #endif |
4372 | } |
4373 | void cache_leave (hb_ot_apply_context_t *c) const |
4374 | { |
4375 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
4376 | subtables[cache_user_idx].cache_leave (c); |
4377 | #endif |
4378 | } |
4379 | |
4380 | |
4381 | hb_set_digest_t digest; |
4382 | private: |
4383 | #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE |
4384 | unsigned cache_user_idx = (unsigned) -1; |
4385 | #endif |
4386 | hb_accelerate_subtables_context_t::hb_applicable_t subtables[HB_VAR_ARRAY]; |
4387 | }; |
4388 | |
4389 | template <typename Types> |
4390 | struct GSUBGPOSVersion1_2 |
4391 | { |
4392 | friend struct GSUBGPOS; |
4393 | |
4394 | protected: |
4395 | FixedVersion<>version; /* Version of the GSUB/GPOS table--initially set |
4396 | * to 0x00010000u */ |
4397 | typename Types:: template OffsetTo<ScriptList> |
4398 | scriptList; /* ScriptList table */ |
4399 | typename Types::template OffsetTo<FeatureList> |
4400 | featureList; /* FeatureList table */ |
4401 | typename Types::template OffsetTo<LookupList<Types>> |
4402 | lookupList; /* LookupList table */ |
4403 | Offset32To<FeatureVariations> |
		featureVars;	/* Offset to Feature Variations
				 * table--from beginning of table
				 * (may be NULL). Introduced
				 * in version 0x00010001. */
4408 | public: |
4409 | DEFINE_SIZE_MIN (4 + 3 * Types::size); |
4410 | |
4411 | unsigned int get_size () const |
4412 | { |
4413 | return min_size + |
4414 | (version.to_int () >= 0x00010001u ? featureVars.static_size : 0); |
4415 | } |
4416 | |
4417 | const typename Types::template OffsetTo<LookupList<Types>>* get_lookup_list_offset () const |
4418 | { |
4419 | return &lookupList; |
4420 | } |
4421 | |
4422 | template <typename TLookup> |
4423 | bool sanitize (hb_sanitize_context_t *c) const |
4424 | { |
4425 | TRACE_SANITIZE (this); |
4426 | typedef List16OfOffsetTo<TLookup, typename Types::HBUINT> TLookupList; |
4427 | if (unlikely (!(scriptList.sanitize (c, this) && |
4428 | featureList.sanitize (c, this) && |
4429 | reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList).sanitize (c, this)))) |
4430 | return_trace (false); |
4431 | |
4432 | #ifndef HB_NO_VAR |
4433 | if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this)))) |
4434 | return_trace (false); |
4435 | #endif |
4436 | |
4437 | return_trace (true); |
4438 | } |
4439 | |
4440 | template <typename TLookup> |
4441 | bool subset (hb_subset_layout_context_t *c) const |
4442 | { |
4443 | TRACE_SUBSET (this); |
4444 | |
4445 | auto *out = c->subset_context->serializer->start_embed (this); |
4446 | if (unlikely (!c->subset_context->serializer->extend_min (out))) return_trace (false); |
4447 | |
4448 | out->version = version; |
4449 | |
4450 | typedef LookupOffsetList<TLookup, typename Types::HBUINT> TLookupList; |
4451 | reinterpret_cast<typename Types::template OffsetTo<TLookupList> &> (out->lookupList) |
4452 | .serialize_subset (c->subset_context, |
4453 | reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList), |
4454 | this, |
4455 | c); |
4456 | |
4457 | reinterpret_cast<typename Types::template OffsetTo<RecordListOfFeature> &> (out->featureList) |
4458 | .serialize_subset (c->subset_context, |
4459 | reinterpret_cast<const typename Types::template OffsetTo<RecordListOfFeature> &> (featureList), |
4460 | this, |
4461 | c); |
4462 | |
4463 | out->scriptList.serialize_subset (c->subset_context, |
4464 | scriptList, |
4465 | this, |
4466 | c); |
4467 | |
4468 | #ifndef HB_NO_VAR |
4469 | if (version.to_int () >= 0x00010001u) |
4470 | { |
4471 | auto snapshot = c->subset_context->serializer->snapshot (); |
4472 | if (!c->subset_context->serializer->extend_min (&out->featureVars)) |
4473 | return_trace (false); |
4474 | |
      // TODO(qxliu76): the current implementation doesn't correctly handle feature variations
      // that are dropped by instancing when the associated conditions don't trigger.
      // Since partial instancing isn't supported yet this isn't an issue currently,
      // but it will need to be fixed for partial instancing.

      // If all axes are pinned, all feature variations are dropped.
4483 | bool ret = !c->subset_context->plan->all_axes_pinned |
4484 | && out->featureVars.serialize_subset (c->subset_context, featureVars, this, c); |
4485 | if (!ret && version.major == 1) |
4486 | { |
4487 | c->subset_context->serializer->revert (snapshot); |
4488 | out->version.major = 1; |
4489 | out->version.minor = 0; |
4490 | } |
4491 | } |
4492 | #endif |
4493 | |
4494 | return_trace (true); |
4495 | } |
4496 | }; |
4497 | |
4498 | struct GSUBGPOS |
4499 | { |
4500 | unsigned int get_size () const |
4501 | { |
4502 | switch (u.version.major) { |
4503 | case 1: return u.version1.get_size (); |
4504 | #ifndef HB_NO_BEYOND_64K |
4505 | case 2: return u.version2.get_size (); |
4506 | #endif |
4507 | default: return u.version.static_size; |
4508 | } |
4509 | } |
4510 | |
4511 | template <typename TLookup> |
4512 | bool sanitize (hb_sanitize_context_t *c) const |
4513 | { |
4514 | TRACE_SANITIZE (this); |
4515 | if (unlikely (!u.version.sanitize (c))) return_trace (false); |
4516 | switch (u.version.major) { |
4517 | case 1: return_trace (u.version1.sanitize<TLookup> (c)); |
4518 | #ifndef HB_NO_BEYOND_64K |
4519 | case 2: return_trace (u.version2.sanitize<TLookup> (c)); |
4520 | #endif |
4521 | default: return_trace (true); |
4522 | } |
4523 | } |
4524 | |
4525 | template <typename TLookup> |
4526 | bool subset (hb_subset_layout_context_t *c) const |
4527 | { |
4528 | switch (u.version.major) { |
4529 | case 1: return u.version1.subset<TLookup> (c); |
4530 | #ifndef HB_NO_BEYOND_64K |
4531 | case 2: return u.version2.subset<TLookup> (c); |
4532 | #endif |
4533 | default: return false; |
4534 | } |
4535 | } |
4536 | |
4537 | const ScriptList &get_script_list () const |
4538 | { |
4539 | switch (u.version.major) { |
4540 | case 1: return this+u.version1.scriptList; |
4541 | #ifndef HB_NO_BEYOND_64K |
4542 | case 2: return this+u.version2.scriptList; |
4543 | #endif |
4544 | default: return Null (ScriptList); |
4545 | } |
4546 | } |
4547 | const FeatureList &get_feature_list () const |
4548 | { |
4549 | switch (u.version.major) { |
4550 | case 1: return this+u.version1.featureList; |
4551 | #ifndef HB_NO_BEYOND_64K |
4552 | case 2: return this+u.version2.featureList; |
4553 | #endif |
4554 | default: return Null (FeatureList); |
4555 | } |
4556 | } |
4557 | unsigned int get_lookup_count () const |
4558 | { |
4559 | switch (u.version.major) { |
4560 | case 1: return (this+u.version1.lookupList).len; |
4561 | #ifndef HB_NO_BEYOND_64K |
4562 | case 2: return (this+u.version2.lookupList).len; |
4563 | #endif |
4564 | default: return 0; |
4565 | } |
4566 | } |
4567 | const Lookup& get_lookup (unsigned int i) const |
4568 | { |
4569 | switch (u.version.major) { |
4570 | case 1: return (this+u.version1.lookupList)[i]; |
4571 | #ifndef HB_NO_BEYOND_64K |
4572 | case 2: return (this+u.version2.lookupList)[i]; |
4573 | #endif |
4574 | default: return Null (Lookup); |
4575 | } |
4576 | } |
4577 | const FeatureVariations &get_feature_variations () const |
4578 | { |
4579 | switch (u.version.major) { |
4580 | case 1: return (u.version.to_int () >= 0x00010001u ? this+u.version1.featureVars : Null (FeatureVariations)); |
4581 | #ifndef HB_NO_BEYOND_64K |
4582 | case 2: return this+u.version2.featureVars; |
4583 | #endif |
4584 | default: return Null (FeatureVariations); |
4585 | } |
4586 | } |

  bool has_data () const { return u.version.to_int (); }
  unsigned int get_script_count () const
  { return get_script_list ().len; }
  const Tag& get_script_tag (unsigned int i) const
  { return get_script_list ().get_tag (i); }
  unsigned int get_script_tags (unsigned int start_offset,
                                unsigned int *script_count /* IN/OUT */,
                                hb_tag_t     *script_tags /* OUT */) const
  { return get_script_list ().get_tags (start_offset, script_count, script_tags); }
  const Script& get_script (unsigned int i) const
  { return get_script_list ()[i]; }
  bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return get_script_list ().find_index (tag, index); }

  unsigned int get_feature_count () const
  { return get_feature_list ().len; }
  hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : get_feature_list ().get_tag (i); }
  unsigned int get_feature_tags (unsigned int start_offset,
                                 unsigned int *feature_count /* IN/OUT */,
                                 hb_tag_t     *feature_tags /* OUT */) const
  { return get_feature_list ().get_tags (start_offset, feature_count, feature_tags); }
  const Feature& get_feature (unsigned int i) const
  { return get_feature_list ()[i]; }
  bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return get_feature_list ().find_index (tag, index); }

  bool find_variations_index (const int *coords, unsigned int num_coords,
                              unsigned int *index) const
  {
#ifdef HB_NO_VAR
    *index = FeatureVariations::NOT_FOUND_INDEX;
    return false;
#endif
    return get_feature_variations ().find_index (coords, num_coords, index);
  }
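
  /* The `coords` passed above are normalized variation coordinates
   * (2.14 fixed-point values stored in ints), i.e. the design-space
   * location after axis normalization; the index returned is that of the
   * first FeatureVariations record whose condition set matches. */
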
  const Feature& get_feature_variation (unsigned int feature_index,
                                        unsigned int variations_index) const
  {
#ifndef HB_NO_VAR
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
        u.version.to_int () >= 0x00010001u)
    {
      const Feature *feature = get_feature_variations ().find_substitute (variations_index,
                                                                          feature_index);
      if (feature)
        return *feature;
    }
#endif
    return get_feature (feature_index);
  }
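
  /* Typical caller pattern (a sketch; the names are illustrative):
   *
   *   unsigned variations_index;
   *   gsub.find_variations_index (coords, num_coords, &variations_index);
   *   const Feature &f = gsub.get_feature_variation (feature_index,
   *                                                  variations_index);
   *
   * i.e. resolve the design-space location once, then fetch the (possibly
   * substituted) version of each feature through it. */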

  void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
                                          const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
                                          hb_set_t       *lookup_indexes /* OUT */) const
  {
#ifndef HB_NO_VAR
    get_feature_variations ().collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
#endif
  }

#ifndef HB_NO_VAR
  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  { get_feature_variations ().collect_feature_substitutes_with_variations (c); }
#endif

  template <typename TLookup>
  void closure_lookups (hb_face_t      *face,
                        const hb_set_t *glyphs,
                        hb_set_t       *lookup_indexes /* IN/OUT */) const
  {
    hb_set_t visited_lookups, inactive_lookups;
    hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);

    c.set_recurse_func (TLookup::template dispatch_recurse_func<hb_closure_lookups_context_t>);

    for (unsigned lookup_index : *lookup_indexes)
      reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);

    hb_set_union (lookup_indexes, &visited_lookups);
    hb_set_subtract (lookup_indexes, &inactive_lookups);
  }
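
  /* closure_lookups() computes the transitive closure of the seed lookup
   * set: contextual lookups may recurse into others, so every lookup
   * reachable from the seeds gets collected (visited_lookups), after which
   * lookups that cannot apply to the given glyph set are dropped
   * (inactive_lookups).  The subsetter uses this to decide which lookups
   * to retain. */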

  void prune_langsys (const hb_map_t *duplicate_feature_map,
                      const hb_set_t *layout_scripts,
                      hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map,
                      hb_set_t       *new_feature_indexes /* OUT */) const
  {
    hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);

    unsigned count = get_script_count ();
    for (unsigned script_index = 0; script_index < count; script_index++)
    {
      const Tag& tag = get_script_tag (script_index);
      if (!layout_scripts->has (tag)) continue;
      const Script& s = get_script (script_index);
      s.prune_langsys (&c, script_index);
    }
  }
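
  /* prune_langsys() only visits scripts the subset plan retains
   * (layout_scripts); surviving LangSys entries are recorded per script in
   * script_langsys_map, and the feature indexes they reference accumulate
   * into new_feature_indexes. */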

  void prune_features (const hb_map_t *lookup_indices, /* IN */
                       const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* IN */
                       const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map, /* IN */
                       hb_set_t       *feature_indices /* IN/OUT */) const
  {
#ifndef HB_NO_VAR
    // The set of feature indices that have alternate versions defined in the
    // FeatureVariations table whose alternate version(s) intersect the set
    // of lookup indices.
    hb_set_t alternate_feature_indices;
    get_feature_variations ().closure_features (lookup_indices, feature_record_cond_idx_map, &alternate_feature_indices);
    if (unlikely (alternate_feature_indices.in_error ()))
    {
      feature_indices->err ();
      return;
    }
#endif

    for (unsigned i : hb_iter (feature_indices))
    {
      hb_tag_t tag = get_feature_tag (i);
      if (tag == HB_TAG ('p', 'r', 'e', 'f'))
        // Note: Never ever drop feature 'pref', even if it's empty.
        // HarfBuzz chooses the shaper for Khmer based on the presence of
        // this feature.  See thread at:
        // http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
        continue;

      const Feature *f = &(get_feature (i));
      const Feature** p = nullptr;
      if (feature_substitutes_map->has (i, &p))
        f = *p;

      if (!f->featureParams.is_null () &&
          tag == HB_TAG ('s', 'i', 'z', 'e'))
        continue;

      if (!f->intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
          && !alternate_feature_indices.has (i)
#endif
         )
        feature_indices->del (i);
    }
  }
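
  /* Summary of the retention rules above: a feature survives pruning if it
   * (a) is 'pref' (shaper selection depends on its mere presence), (b) is
   * 'size' with non-null featureParams (its payload lives in the params,
   * not in lookups), or (c) references at least one retained lookup,
   * directly or via a FeatureVariations alternate. */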

  void collect_name_ids (const hb_map_t *feature_index_map,
                         hb_set_t       *nameids_to_retain /* OUT */) const
  {
    unsigned count = get_feature_count ();
    for (unsigned i = 0; i < count; i++)
    {
      if (!feature_index_map->has (i)) continue;
      hb_tag_t tag = get_feature_tag (i);
      get_feature (i).collect_name_ids (tag, nameids_to_retain);
    }
  }

  template <typename T>
  struct accelerator_t
  {
    accelerator_t (hb_face_t *face)
    {
      hb_sanitize_context_t sc;
      sc.lazy_some_gpos = true;
      this->table = sc.reference_table<T> (face);

      if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
      {
        hb_blob_destroy (this->table.get_blob ());
        this->table = hb_blob_get_empty ();
      }

      this->lookup_count = table->get_lookup_count ();

      this->accels = (hb_atomic_ptr_t<hb_ot_layout_lookup_accelerator_t> *) hb_calloc (this->lookup_count, sizeof (*accels));
      if (unlikely (!this->accels))
      {
        this->lookup_count = 0;
        this->table.destroy ();
        this->table = hb_blob_get_empty ();
      }
    }
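
    /* Both failure paths above degrade to a safe empty state: a
     * blocklisted (known-broken) table and a failed accels allocation each
     * swap in the empty blob, so callers never need to distinguish "bad
     * table" from "no table". */
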
    ~accelerator_t ()
    {
      for (unsigned int i = 0; i < this->lookup_count; i++)
        hb_free (this->accels[i]);
      hb_free (this->accels);
      this->table.destroy ();
    }

    hb_blob_t *get_blob () const { return table.get_blob (); }

    hb_ot_layout_lookup_accelerator_t *get_accel (unsigned lookup_index) const
    {
      if (unlikely (lookup_index >= lookup_count)) return nullptr;

    retry:
      auto *accel = accels[lookup_index].get_acquire ();
      if (unlikely (!accel))
      {
        accel = hb_ot_layout_lookup_accelerator_t::create (table->get_lookup (lookup_index));
        if (unlikely (!accel))
          return nullptr;

        if (unlikely (!accels[lookup_index].cmpexch (nullptr, accel)))
        {
          hb_free (accel);
          goto retry;
        }
      }

      return accel;
    }
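
    /* get_accel() is a lock-free lazy initializer: the acquire load pairs
     * with the release inside cmpexch.  If two threads race to build the
     * same accelerator, exactly one cmpexch succeeds; the loser frees its
     * copy and retries, now observing the winner's pointer.  The common
     * (already-built) path is a single atomic load, with no mutex. */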

    hb_blob_ptr_t<T> table;
    unsigned int lookup_count;
    hb_atomic_ptr_t<hb_ot_layout_lookup_accelerator_t> *accels;
  };

  protected:
  union {
  FixedVersion<>                    version;   /* Version identifier */
  GSUBGPOSVersion1_2<SmallTypes>    version1;
#ifndef HB_NO_BEYOND_64K
  GSUBGPOSVersion1_2<MediumTypes>   version2;
#endif
  } u;
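
  /* All union members start with the same FixedVersion<> field, so reading
   * u.version before dispatching (as every accessor above does) is
   * well-defined no matter which variant the font actually contains. */
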
  public:
  DEFINE_SIZE_MIN (4);
};


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUBGPOS_HH */