1 | /* |
2 | * Copyright © 2007,2008,2009,2010 Red Hat, Inc. |
3 | * Copyright © 2010,2012 Google, Inc. |
4 | * |
5 | * This is part of HarfBuzz, a text shaping library. |
6 | * |
7 | * Permission is hereby granted, without written agreement and without |
8 | * license or royalty fees, to use, copy, modify, and distribute this |
9 | * software and its documentation for any purpose, provided that the |
10 | * above copyright notice and the following two paragraphs appear in |
11 | * all copies of this software. |
12 | * |
13 | * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
14 | * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
15 | * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
16 | * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
17 | * DAMAGE. |
18 | * |
19 | * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
20 | * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
21 | * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
22 | * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
23 | * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
24 | * |
25 | * Red Hat Author(s): Behdad Esfahbod |
26 | * Google Author(s): Behdad Esfahbod |
27 | */ |
28 | |
29 | #ifndef HB_OT_LAYOUT_GSUBGPOS_HH |
30 | #define HB_OT_LAYOUT_GSUBGPOS_HH |
31 | |
32 | #include "hb.hh" |
33 | #include "hb-buffer.hh" |
34 | #include "hb-map.hh" |
35 | #include "hb-set.hh" |
36 | #include "hb-ot-layout-common.hh" |
37 | #include "hb-ot-layout-gdef-table.hh" |
38 | |
39 | |
40 | namespace OT { |
41 | |
42 | |
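/* Dispatch context that answers whether a lookup or subtable can produce
 * any output for a given glyph set: each subtable's intersects() is called
 * against `glyphs`, and sublookup iteration stops as soon as one returns
 * true. */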
43 | struct hb_intersects_context_t : |
44 | hb_dispatch_context_t<hb_intersects_context_t, bool, 0> |
45 | { |
inline const char *get_name (void) { return "INTERSECTS"; }
47 | template <typename T> |
48 | inline return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); } |
49 | static return_t default_return_value (void) { return false; } |
50 | bool stop_sublookup_iteration (return_t r) const { return r; } |
51 | |
52 | const hb_set_t *glyphs; |
53 | unsigned int debug_depth; |
54 | |
55 | hb_intersects_context_t (const hb_set_t *glyphs_) : |
56 | glyphs (glyphs_), |
57 | debug_depth (0) {} |
58 | }; |
59 | |
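/* Dispatch context for computing the closure of a glyph set under
 * substitution: newly reachable glyphs are accumulated in `out` and merged
 * back into `glyphs` by flush().  done_lookups records the glyph-set
 * population at which each lookup was last visited, so a lookup is only
 * re-walked after the set has grown. */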
60 | struct hb_closure_context_t : |
61 | hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE> |
62 | { |
inline const char *get_name (void) { return "CLOSURE"; }
64 | typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index); |
65 | template <typename T> |
66 | inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; } |
67 | static return_t default_return_value (void) { return HB_VOID; } |
68 | bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; } |
69 | void recurse (unsigned int lookup_index) |
70 | { |
71 | if (unlikely (nesting_level_left == 0 || !recurse_func)) |
72 | return; |
73 | |
74 | nesting_level_left--; |
75 | recurse_func (this, lookup_index); |
76 | nesting_level_left++; |
77 | } |
78 | |
79 | bool should_visit_lookup (unsigned int lookup_index) |
80 | { |
81 | if (is_lookup_done (lookup_index)) |
82 | return false; |
83 | done_lookups->set (lookup_index, glyphs->get_population ()); |
84 | return true; |
85 | } |
86 | |
87 | bool is_lookup_done (unsigned int lookup_index) |
88 | { |
89 | /* Have we visited this lookup with the current set of glyphs? */ |
90 | return done_lookups->get (lookup_index) == glyphs->get_population (); |
91 | } |
92 | |
93 | hb_face_t *face; |
94 | hb_set_t *glyphs; |
95 | hb_auto_t<hb_set_t> out[1]; |
96 | recurse_func_t recurse_func; |
97 | unsigned int nesting_level_left; |
98 | unsigned int debug_depth; |
99 | |
100 | hb_closure_context_t (hb_face_t *face_, |
101 | hb_set_t *glyphs_, |
102 | hb_map_t *done_lookups_, |
103 | unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) : |
104 | face (face_), |
105 | glyphs (glyphs_), |
106 | recurse_func (nullptr), |
107 | nesting_level_left (nesting_level_left_), |
108 | debug_depth (0), |
109 | done_lookups (done_lookups_) {} |
110 | |
111 | ~hb_closure_context_t (void) |
112 | { |
113 | flush (); |
114 | } |
115 | |
116 | void set_recurse_func (recurse_func_t func) { recurse_func = func; } |
117 | |
118 | void flush (void) |
119 | { |
120 | hb_set_union (glyphs, out); |
121 | hb_set_clear (out); |
122 | } |
123 | |
124 | private: |
125 | hb_map_t *done_lookups; |
126 | }; |
127 | |
128 | |
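/* Dispatch context for "would apply" queries: tests whether a lookup would
 * match the glyph sequence in `glyphs`/`len` without touching any buffer.
 * When zero_context is set, only rules with no backtrack or lookahead
 * requirements are considered. */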
129 | struct hb_would_apply_context_t : |
130 | hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY> |
131 | { |
inline const char *get_name (void) { return "WOULD_APPLY"; }
133 | template <typename T> |
134 | inline return_t dispatch (const T &obj) { return obj.would_apply (this); } |
135 | static return_t default_return_value (void) { return false; } |
136 | bool stop_sublookup_iteration (return_t r) const { return r; } |
137 | |
138 | hb_face_t *face; |
139 | const hb_codepoint_t *glyphs; |
140 | unsigned int len; |
141 | bool zero_context; |
142 | unsigned int debug_depth; |
143 | |
144 | hb_would_apply_context_t (hb_face_t *face_, |
145 | const hb_codepoint_t *glyphs_, |
146 | unsigned int len_, |
147 | bool zero_context_) : |
148 | face (face_), |
149 | glyphs (glyphs_), |
150 | len (len_), |
151 | zero_context (zero_context_), |
152 | debug_depth (0) {} |
153 | }; |
154 | |
155 | |
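/* Dispatch context for collecting the glyphs a lookup may act on, split
 * into backtrack (before), input, lookahead (after), and output sets.
 * When recursing into nested lookups only the output set is collected,
 * and recursed_lookups prevents visiting the same lookup twice. */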
156 | struct hb_collect_glyphs_context_t : |
157 | hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, HB_DEBUG_COLLECT_GLYPHS> |
158 | { |
inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
160 | typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index); |
161 | template <typename T> |
162 | inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; } |
163 | static return_t default_return_value (void) { return HB_VOID; } |
164 | bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; } |
165 | void recurse (unsigned int lookup_index) |
166 | { |
167 | if (unlikely (nesting_level_left == 0 || !recurse_func)) |
168 | return; |
169 | |
170 | /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get |
171 | * past the previous check. For GSUB, we only want to collect the output |
172 | * glyphs in the recursion. If output is not requested, we can go home now. |
173 | * |
* Note further that the above is not exactly correct. A recursed lookup
175 | * is allowed to match input that is not matched in the context, but that's |
176 | * not how most fonts are built. It's possible to relax that and recurse |
177 | * with all sets here if it proves to be an issue. |
178 | */ |
179 | |
180 | if (output == hb_set_get_empty ()) |
181 | return; |
182 | |
/* Return if this lookup has already been recursed to. */
184 | if (recursed_lookups->has (lookup_index)) |
185 | return; |
186 | |
187 | hb_set_t *old_before = before; |
188 | hb_set_t *old_input = input; |
189 | hb_set_t *old_after = after; |
190 | before = input = after = hb_set_get_empty (); |
191 | |
192 | nesting_level_left--; |
193 | recurse_func (this, lookup_index); |
194 | nesting_level_left++; |
195 | |
196 | before = old_before; |
197 | input = old_input; |
198 | after = old_after; |
199 | |
200 | recursed_lookups->add (lookup_index); |
201 | |
202 | return; |
203 | } |
204 | |
205 | hb_face_t *face; |
206 | hb_set_t *before; |
207 | hb_set_t *input; |
208 | hb_set_t *after; |
209 | hb_set_t *output; |
210 | recurse_func_t recurse_func; |
211 | hb_set_t *recursed_lookups; |
212 | unsigned int nesting_level_left; |
213 | unsigned int debug_depth; |
214 | |
215 | hb_collect_glyphs_context_t (hb_face_t *face_, |
216 | hb_set_t *glyphs_before, /* OUT. May be nullptr */ |
217 | hb_set_t *glyphs_input, /* OUT. May be nullptr */ |
218 | hb_set_t *glyphs_after, /* OUT. May be nullptr */ |
219 | hb_set_t *glyphs_output, /* OUT. May be nullptr */ |
220 | unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) : |
221 | face (face_), |
222 | before (glyphs_before ? glyphs_before : hb_set_get_empty ()), |
223 | input (glyphs_input ? glyphs_input : hb_set_get_empty ()), |
224 | after (glyphs_after ? glyphs_after : hb_set_get_empty ()), |
225 | output (glyphs_output ? glyphs_output : hb_set_get_empty ()), |
226 | recurse_func (nullptr), |
227 | recursed_lookups (hb_set_create ()), |
228 | nesting_level_left (nesting_level_left_), |
229 | debug_depth (0) {} |
230 | ~hb_collect_glyphs_context_t (void) { hb_set_destroy (recursed_lookups); } |
231 | |
232 | void set_recurse_func (recurse_func_t func) { recurse_func = func; } |
233 | }; |
234 | |
235 | |
236 | |
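/* Dispatch context that asks each subtable for its Coverage table and
 * accumulates it into `set`. */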
237 | template <typename set_t> |
238 | struct hb_add_coverage_context_t : |
239 | hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE> |
240 | { |
inline const char *get_name (void) { return "GET_COVERAGE"; }
242 | typedef const Coverage &return_t; |
243 | template <typename T> |
244 | inline return_t dispatch (const T &obj) { return obj.get_coverage (); } |
245 | static return_t default_return_value (void) { return Null(Coverage); } |
246 | bool stop_sublookup_iteration (return_t r) const |
247 | { |
248 | r.add_coverage (set); |
249 | return false; |
250 | } |
251 | |
252 | hb_add_coverage_context_t (set_t *set_) : |
253 | set (set_), |
254 | debug_depth (0) {} |
255 | |
256 | set_t *set; |
257 | unsigned int debug_depth; |
258 | }; |
259 | |
260 | |
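/* The main context for applying GSUB/GPOS lookups to a buffer.  Carries
 * the font, buffer, GDEF table (glyph classes and mark-filtering sets),
 * the current lookup's mask/index/props, and two skipping iterators:
 * iter_input for matching input glyphs and iter_context for matching
 * backtrack/lookahead context. */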
261 | struct hb_ot_apply_context_t : |
262 | hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY> |
263 | { |
264 | struct matcher_t |
265 | { |
266 | inline matcher_t (void) : |
267 | lookup_props (0), |
268 | ignore_zwnj (false), |
269 | ignore_zwj (false), |
270 | mask (-1), |
271 | #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */ |
272 | syllable arg1(0), |
273 | #undef arg1 |
274 | match_func (nullptr), |
match_data (nullptr) {}
276 | |
277 | typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data); |
278 | |
279 | inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; } |
280 | inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; } |
281 | inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; } |
282 | inline void set_mask (hb_mask_t mask_) { mask = mask_; } |
283 | inline void set_syllable (uint8_t syllable_) { syllable = syllable_; } |
284 | inline void set_match_func (match_func_t match_func_, |
285 | const void *match_data_) |
286 | { match_func = match_func_; match_data = match_data_; } |
287 | |
288 | enum may_match_t { |
289 | MATCH_NO, |
290 | MATCH_YES, |
291 | MATCH_MAYBE |
292 | }; |
293 | |
294 | inline may_match_t may_match (const hb_glyph_info_t &info, |
295 | const HBUINT16 *glyph_data) const |
296 | { |
297 | if (!(info.mask & mask) || |
298 | (syllable && syllable != info.syllable ())) |
299 | return MATCH_NO; |
300 | |
301 | if (match_func) |
302 | return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO; |
303 | |
304 | return MATCH_MAYBE; |
305 | } |
306 | |
307 | enum may_skip_t { |
308 | SKIP_NO, |
309 | SKIP_YES, |
310 | SKIP_MAYBE |
311 | }; |
312 | |
313 | inline may_skip_t |
314 | may_skip (const hb_ot_apply_context_t *c, |
315 | const hb_glyph_info_t &info) const |
316 | { |
317 | if (!c->check_glyph_property (&info, lookup_props)) |
318 | return SKIP_YES; |
319 | |
320 | if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) && |
321 | (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) && |
322 | (ignore_zwj || !_hb_glyph_info_is_zwj (&info)))) |
323 | return SKIP_MAYBE; |
324 | |
325 | return SKIP_NO; |
326 | } |
327 | |
328 | protected: |
329 | unsigned int lookup_props; |
330 | bool ignore_zwnj; |
331 | bool ignore_zwj; |
332 | hb_mask_t mask; |
333 | uint8_t syllable; |
334 | match_func_t match_func; |
335 | const void *match_data; |
336 | }; |
337 | |
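/* Iterator that advances over buffer glyphs while skipping the ones the
 * current lookup ignores (lookup flags, ZWJ/ZWNJ handling, mask, and
 * syllable checks).  next() walks forward over buffer->info, prev() walks
 * backward over buffer->out_info; each visited glyph is also tested
 * against the configured match function and glyph data. */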
338 | struct skipping_iterator_t |
339 | { |
340 | inline void init (hb_ot_apply_context_t *c_, bool context_match = false) |
341 | { |
342 | c = c_; |
343 | match_glyph_data = nullptr; |
344 | matcher.set_match_func (nullptr, nullptr); |
345 | matcher.set_lookup_props (c->lookup_props); |
346 | /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */ |
347 | matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj)); |
348 | /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */ |
349 | matcher.set_ignore_zwj (c->table_index == 1 || (context_match || c->auto_zwj)); |
350 | matcher.set_mask (context_match ? -1 : c->lookup_mask); |
351 | } |
352 | inline void set_lookup_props (unsigned int lookup_props) |
353 | { |
354 | matcher.set_lookup_props (lookup_props); |
355 | } |
356 | inline void set_match_func (matcher_t::match_func_t match_func_, |
357 | const void *match_data_, |
358 | const HBUINT16 glyph_data[]) |
359 | { |
360 | matcher.set_match_func (match_func_, match_data_); |
361 | match_glyph_data = glyph_data; |
362 | } |
363 | |
364 | inline void reset (unsigned int start_index_, |
365 | unsigned int num_items_) |
366 | { |
367 | idx = start_index_; |
368 | num_items = num_items_; |
369 | end = c->buffer->len; |
370 | matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0); |
371 | } |
372 | |
373 | inline void reject (void) { num_items++; match_glyph_data--; } |
374 | |
375 | inline matcher_t::may_skip_t |
376 | may_skip (const hb_glyph_info_t &info) const |
377 | { |
378 | return matcher.may_skip (c, info); |
379 | } |
380 | |
381 | inline bool next (void) |
382 | { |
383 | assert (num_items > 0); |
384 | while (idx + num_items < end) |
385 | { |
386 | idx++; |
387 | const hb_glyph_info_t &info = c->buffer->info[idx]; |
388 | |
389 | matcher_t::may_skip_t skip = matcher.may_skip (c, info); |
390 | if (unlikely (skip == matcher_t::SKIP_YES)) |
391 | continue; |
392 | |
393 | matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data); |
394 | if (match == matcher_t::MATCH_YES || |
395 | (match == matcher_t::MATCH_MAYBE && |
396 | skip == matcher_t::SKIP_NO)) |
397 | { |
398 | num_items--; |
399 | match_glyph_data++; |
400 | return true; |
401 | } |
402 | |
403 | if (skip == matcher_t::SKIP_NO) |
404 | return false; |
405 | } |
406 | return false; |
407 | } |
408 | inline bool prev (void) |
409 | { |
410 | assert (num_items > 0); |
411 | while (idx > num_items - 1) |
412 | { |
413 | idx--; |
414 | const hb_glyph_info_t &info = c->buffer->out_info[idx]; |
415 | |
416 | matcher_t::may_skip_t skip = matcher.may_skip (c, info); |
417 | if (unlikely (skip == matcher_t::SKIP_YES)) |
418 | continue; |
419 | |
420 | matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data); |
421 | if (match == matcher_t::MATCH_YES || |
422 | (match == matcher_t::MATCH_MAYBE && |
423 | skip == matcher_t::SKIP_NO)) |
424 | { |
425 | num_items--; |
426 | match_glyph_data++; |
427 | return true; |
428 | } |
429 | |
430 | if (skip == matcher_t::SKIP_NO) |
431 | return false; |
432 | } |
433 | return false; |
434 | } |
435 | |
436 | unsigned int idx; |
437 | protected: |
438 | hb_ot_apply_context_t *c; |
439 | matcher_t matcher; |
440 | const HBUINT16 *match_glyph_data; |
441 | |
442 | unsigned int num_items; |
443 | unsigned int end; |
444 | }; |
445 | |
446 | |
inline const char *get_name (void) { return "APPLY"; }
448 | typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index); |
449 | template <typename T> |
450 | inline return_t dispatch (const T &obj) { return obj.apply (this); } |
451 | static return_t default_return_value (void) { return false; } |
452 | bool stop_sublookup_iteration (return_t r) const { return r; } |
453 | return_t recurse (unsigned int sub_lookup_index) |
454 | { |
455 | if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0)) |
456 | return default_return_value (); |
457 | |
458 | nesting_level_left--; |
459 | bool ret = recurse_func (this, sub_lookup_index); |
460 | nesting_level_left++; |
461 | return ret; |
462 | } |
463 | |
464 | skipping_iterator_t iter_input, iter_context; |
465 | |
466 | hb_font_t *font; |
467 | hb_face_t *face; |
468 | hb_buffer_t *buffer; |
469 | recurse_func_t recurse_func; |
470 | const GDEF &gdef; |
471 | const VariationStore &var_store; |
472 | |
473 | hb_direction_t direction; |
474 | hb_mask_t lookup_mask; |
475 | unsigned int table_index; /* GSUB/GPOS */ |
476 | unsigned int lookup_index; |
477 | unsigned int lookup_props; |
478 | unsigned int nesting_level_left; |
479 | unsigned int debug_depth; |
480 | |
481 | bool auto_zwnj; |
482 | bool auto_zwj; |
483 | bool has_glyph_classes; |
484 | |
485 | |
486 | hb_ot_apply_context_t (unsigned int table_index_, |
487 | hb_font_t *font_, |
488 | hb_buffer_t *buffer_) : |
489 | iter_input (), iter_context (), |
490 | font (font_), face (font->face), buffer (buffer_), |
491 | recurse_func (nullptr), |
492 | gdef (_get_gdef (face)), |
493 | var_store (gdef.get_var_store ()), |
494 | direction (buffer_->props.direction), |
495 | lookup_mask (1), |
496 | table_index (table_index_), |
497 | lookup_index ((unsigned int) -1), |
498 | lookup_props (0), |
499 | nesting_level_left (HB_MAX_NESTING_LEVEL), |
500 | debug_depth (0), |
501 | auto_zwnj (true), |
502 | auto_zwj (true), |
503 | has_glyph_classes (gdef.has_glyph_classes ()) {} |
504 | |
505 | inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; } |
506 | inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; } |
507 | inline void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; } |
508 | inline void set_recurse_func (recurse_func_t func) { recurse_func = func; } |
509 | inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; } |
510 | inline void set_lookup_props (unsigned int lookup_props_) |
511 | { |
512 | lookup_props = lookup_props_; |
513 | iter_input.init (this, false); |
514 | iter_context.init (this, true); |
515 | } |
516 | |
517 | inline bool |
518 | match_properties_mark (hb_codepoint_t glyph, |
519 | unsigned int glyph_props, |
520 | unsigned int match_props) const |
521 | { |
522 | /* If using mark filtering sets, the high short of |
523 | * match_props has the set index. |
524 | */ |
525 | if (match_props & LookupFlag::UseMarkFilteringSet) |
526 | return gdef.mark_set_covers (match_props >> 16, glyph); |
527 | |
528 | /* The second byte of match_props has the meaning |
529 | * "ignore marks of attachment type different than |
530 | * the attachment type specified." |
531 | */ |
532 | if (match_props & LookupFlag::MarkAttachmentType) |
533 | return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType); |
534 | |
535 | return true; |
536 | } |
537 | |
538 | inline bool |
539 | check_glyph_property (const hb_glyph_info_t *info, |
540 | unsigned int match_props) const |
541 | { |
542 | hb_codepoint_t glyph = info->codepoint; |
543 | unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info); |
544 | |
545 | /* Not covered, if, for example, glyph class is ligature and |
* match_props includes LookupFlag::IgnoreLigatures
547 | */ |
548 | if (glyph_props & match_props & LookupFlag::IgnoreFlags) |
549 | return false; |
550 | |
551 | if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK)) |
552 | return match_properties_mark (glyph, glyph_props, match_props); |
553 | |
554 | return true; |
555 | } |
556 | |
557 | inline void _set_glyph_props (hb_codepoint_t glyph_index, |
558 | unsigned int class_guess = 0, |
559 | bool ligature = false, |
560 | bool component = false) const |
561 | { |
562 | unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) & |
563 | HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE; |
564 | add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED; |
565 | if (ligature) |
566 | { |
567 | add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED; |
568 | /* In the only place that the MULTIPLIED bit is used, Uniscribe |
569 | * seems to only care about the "last" transformation between |
* Ligature and Multiple substitutions. I.e. if you ligate, expand,
571 | * and ligate again, it forgives the multiplication and acts as |
572 | * if only ligation happened. As such, clear MULTIPLIED bit. |
573 | */ |
574 | add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED; |
575 | } |
576 | if (component) |
577 | add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED; |
578 | if (likely (has_glyph_classes)) |
579 | _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index)); |
580 | else if (class_guess) |
581 | _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess); |
582 | } |
583 | |
584 | inline void replace_glyph (hb_codepoint_t glyph_index) const |
585 | { |
586 | _set_glyph_props (glyph_index); |
587 | buffer->replace_glyph (glyph_index); |
588 | } |
589 | inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const |
590 | { |
591 | _set_glyph_props (glyph_index); |
592 | buffer->cur().codepoint = glyph_index; |
593 | } |
594 | inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index, |
595 | unsigned int class_guess) const |
596 | { |
597 | _set_glyph_props (glyph_index, class_guess, true); |
598 | buffer->replace_glyph (glyph_index); |
599 | } |
600 | inline void output_glyph_for_component (hb_codepoint_t glyph_index, |
601 | unsigned int class_guess) const |
602 | { |
603 | _set_glyph_props (glyph_index, class_guess, false, true); |
604 | buffer->output_glyph (glyph_index); |
605 | } |
606 | }; |
607 | |
608 | |
609 | |
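/* Context subtables describe their match sequences either as glyph ids,
 * as class values, or as offsets to Coverage tables.  The function types
 * below parameterize intersection, glyph collection, and matching over
 * those three flavors. */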
610 | typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data); |
611 | typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data); |
612 | typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data); |
613 | |
614 | struct ContextClosureFuncs |
615 | { |
616 | intersects_func_t intersects; |
617 | }; |
618 | struct ContextCollectGlyphsFuncs |
619 | { |
620 | collect_glyphs_func_t collect; |
621 | }; |
622 | struct ContextApplyFuncs |
623 | { |
624 | match_func_t match; |
625 | }; |
626 | |
627 | |
628 | static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED) |
629 | { |
630 | return glyphs->has (value); |
631 | } |
632 | static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data) |
633 | { |
634 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
635 | return class_def.intersects_class (glyphs, value); |
636 | } |
637 | static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data) |
638 | { |
639 | const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; |
640 | return (data+coverage).intersects (glyphs); |
641 | } |
642 | |
643 | static inline bool intersects_array (const hb_set_t *glyphs, |
644 | unsigned int count, |
645 | const HBUINT16 values[], |
646 | intersects_func_t intersects_func, |
647 | const void *intersects_data) |
648 | { |
649 | for (unsigned int i = 0; i < count; i++) |
650 | if (likely (!intersects_func (glyphs, values[i], intersects_data))) |
651 | return false; |
652 | return true; |
653 | } |
654 | |
655 | |
656 | static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED) |
657 | { |
658 | glyphs->add (value); |
659 | } |
660 | static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data) |
661 | { |
662 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
663 | class_def.add_class (glyphs, value); |
664 | } |
665 | static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data) |
666 | { |
667 | const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; |
668 | (data+coverage).add_coverage (glyphs); |
669 | } |
670 | static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED, |
671 | hb_set_t *glyphs, |
672 | unsigned int count, |
673 | const HBUINT16 values[], |
674 | collect_glyphs_func_t collect_func, |
675 | const void *collect_data) |
676 | { |
677 | for (unsigned int i = 0; i < count; i++) |
678 | collect_func (glyphs, values[i], collect_data); |
679 | } |
680 | |
681 | |
682 | static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED) |
683 | { |
684 | return glyph_id == value; |
685 | } |
686 | static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data) |
687 | { |
688 | const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); |
689 | return class_def.get_class (glyph_id) == value; |
690 | } |
691 | static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data) |
692 | { |
693 | const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; |
694 | return (data+coverage).get_coverage (glyph_id) != NOT_COVERED; |
695 | } |
696 | |
697 | static inline bool would_match_input (hb_would_apply_context_t *c, |
698 | unsigned int count, /* Including the first glyph (not matched) */ |
699 | const HBUINT16 input[], /* Array of input values--start with second glyph */ |
700 | match_func_t match_func, |
701 | const void *match_data) |
702 | { |
703 | if (count != c->len) |
704 | return false; |
705 | |
706 | for (unsigned int i = 1; i < count; i++) |
707 | if (likely (!match_func (c->glyphs[i], input[i - 1], match_data))) |
708 | return false; |
709 | |
710 | return true; |
711 | } |
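/* Matches the `count - 1` input values against the glyphs following the
 * current buffer position (the first glyph is already matched through the
 * subtable's coverage), using the input skipping iterator.  Records the
 * matched positions and total match length, and tracks ligature ids and
 * component counts per the ligature-formation rules in the comment
 * inside. */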
712 | static inline bool match_input (hb_ot_apply_context_t *c, |
713 | unsigned int count, /* Including the first glyph (not matched) */ |
714 | const HBUINT16 input[], /* Array of input values--start with second glyph */ |
715 | match_func_t match_func, |
716 | const void *match_data, |
717 | unsigned int *end_offset, |
718 | unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], |
719 | bool *p_is_mark_ligature = nullptr, |
720 | unsigned int *p_total_component_count = nullptr) |
721 | { |
722 | TRACE_APPLY (nullptr); |
723 | |
724 | if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false); |
725 | |
726 | hb_buffer_t *buffer = c->buffer; |
727 | |
728 | hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input; |
729 | skippy_iter.reset (buffer->idx, count - 1); |
730 | skippy_iter.set_match_func (match_func, match_data, input); |
731 | |
732 | /* |
733 | * This is perhaps the trickiest part of OpenType... Remarks: |
734 | * |
735 | * - If all components of the ligature were marks, we call this a mark ligature. |
736 | * |
737 | * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize |
738 | * it as a ligature glyph. |
739 | * |
740 | * - Ligatures cannot be formed across glyphs attached to different components |
741 | * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and |
* LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
743 | * However, it would be wrong to ligate that SHADDA,FATHA sequence. |
744 | * There are a couple of exceptions to this: |
745 | * |
746 | * o If a ligature tries ligating with marks that belong to it itself, go ahead, |
747 | * assuming that the font designer knows what they are doing (otherwise it can |
* break Indic stuff when a matra wants to ligate with a conjunct).
749 | * |
750 | * o If two marks want to ligate and they belong to different components of the |
751 | * same ligature glyph, and said ligature glyph is to be ignored according to |
752 | * mark-filtering rules, then allow. |
753 | * https://github.com/harfbuzz/harfbuzz/issues/545 |
754 | */ |
755 | |
756 | bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur()); |
757 | |
758 | unsigned int total_component_count = 0; |
759 | total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur()); |
760 | |
761 | unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); |
762 | unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); |
763 | |
764 | enum { |
765 | LIGBASE_NOT_CHECKED, |
766 | LIGBASE_MAY_NOT_SKIP, |
767 | LIGBASE_MAY_SKIP |
768 | } ligbase = LIGBASE_NOT_CHECKED; |
769 | |
770 | match_positions[0] = buffer->idx; |
771 | for (unsigned int i = 1; i < count; i++) |
772 | { |
773 | if (!skippy_iter.next ()) return_trace (false); |
774 | |
775 | match_positions[i] = skippy_iter.idx; |
776 | |
777 | unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]); |
778 | unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]); |
779 | |
780 | if (first_lig_id && first_lig_comp) |
781 | { |
782 | /* If first component was attached to a previous ligature component, |
783 | * all subsequent components should be attached to the same ligature |
784 | * component, otherwise we shouldn't ligate them... */ |
785 | if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp) |
786 | { |
787 | /* ...unless, we are attached to a base ligature and that base |
788 | * ligature is ignorable. */ |
789 | if (ligbase == LIGBASE_NOT_CHECKED) |
790 | { |
791 | bool found = false; |
792 | const hb_glyph_info_t *out = buffer->out_info; |
793 | unsigned int j = buffer->out_len; |
794 | while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id) |
795 | { |
796 | if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0) |
797 | { |
798 | j--; |
799 | found = true; |
800 | break; |
801 | } |
802 | j--; |
803 | } |
804 | |
805 | if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES) |
806 | ligbase = LIGBASE_MAY_SKIP; |
807 | else |
808 | ligbase = LIGBASE_MAY_NOT_SKIP; |
809 | } |
810 | |
811 | if (ligbase == LIGBASE_MAY_NOT_SKIP) |
812 | return_trace (false); |
813 | } |
814 | } |
815 | else |
816 | { |
817 | /* If first component was NOT attached to a previous ligature component, |
818 | * all subsequent components should also NOT be attached to any ligature |
819 | * component, unless they are attached to the first component itself! */ |
820 | if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id)) |
821 | return_trace (false); |
822 | } |
823 | |
824 | is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]); |
825 | total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]); |
826 | } |
827 | |
828 | *end_offset = skippy_iter.idx - buffer->idx + 1; |
829 | |
830 | if (p_is_mark_ligature) |
831 | *p_is_mark_ligature = is_mark_ligature; |
832 | |
833 | if (p_total_component_count) |
834 | *p_total_component_count = total_component_count; |
835 | |
836 | return_trace (true); |
837 | } |
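/* Replaces the matched input sequence with `lig_glyph`, assigning a new
 * ligature id and component numbers, and re-attaches marks sitting between
 * or after the components to the new ligature; see the notes inside for
 * the tricky cases. */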
838 | static inline bool ligate_input (hb_ot_apply_context_t *c, |
839 | unsigned int count, /* Including the first glyph */ |
840 | unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */ |
841 | unsigned int match_length, |
842 | hb_codepoint_t lig_glyph, |
843 | bool is_mark_ligature, |
844 | unsigned int total_component_count) |
845 | { |
846 | TRACE_APPLY (nullptr); |
847 | |
848 | hb_buffer_t *buffer = c->buffer; |
849 | |
850 | buffer->merge_clusters (buffer->idx, buffer->idx + match_length); |
851 | |
852 | /* |
853 | * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave |
854 | * the ligature to keep its old ligature id. This will allow it to attach to |
855 | * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH, |
* and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
857 | * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature |
858 | * later, we don't want them to lose their ligature id/component, otherwise |
859 | * GPOS will fail to correctly position the mark ligature on top of the |
860 | * LAM,LAM,HEH ligature. See: |
861 | * https://bugzilla.gnome.org/show_bug.cgi?id=676343 |
862 | * |
* - If a ligature is formed of components, some of which are themselves
* ligatures, and those ligature components had marks attached to *their*
865 | * components, we have to attach the marks to the new ligature component |
866 | * positions! Now *that*'s tricky! And these marks may be following the |
867 | * last component of the whole sequence, so we should loop forward looking |
868 | * for them and update them. |
869 | * |
870 | * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a |
871 | * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature |
872 | * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature |
873 | * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to |
874 | * the new ligature with a component value of 2. |
875 | * |
876 | * This in fact happened to a font... See: |
877 | * https://bugzilla.gnome.org/show_bug.cgi?id=437633 |
878 | */ |
879 | |
880 | unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE; |
881 | unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer); |
882 | unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); |
883 | unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur()); |
884 | unsigned int components_so_far = last_num_components; |
885 | |
886 | if (!is_mark_ligature) |
887 | { |
888 | _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count); |
889 | if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK) |
890 | { |
891 | _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER); |
892 | } |
893 | } |
894 | c->replace_glyph_with_ligature (lig_glyph, klass); |
895 | |
896 | for (unsigned int i = 1; i < count; i++) |
897 | { |
898 | while (buffer->idx < match_positions[i] && buffer->successful) |
899 | { |
900 | if (!is_mark_ligature) { |
901 | unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); |
902 | if (this_comp == 0) |
903 | this_comp = last_num_components; |
904 | unsigned int new_lig_comp = components_so_far - last_num_components + |
905 | MIN (this_comp, last_num_components); |
906 | _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp); |
907 | } |
908 | buffer->next_glyph (); |
909 | } |
910 | |
911 | last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); |
912 | last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur()); |
913 | components_so_far += last_num_components; |
914 | |
915 | /* Skip the base glyph */ |
916 | buffer->idx++; |
917 | } |
918 | |
919 | if (!is_mark_ligature && last_lig_id) { |
920 | /* Re-adjust components for any marks following. */ |
921 | for (unsigned int i = buffer->idx; i < buffer->len; i++) { |
922 | if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) { |
923 | unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]); |
924 | if (!this_comp) |
925 | break; |
926 | unsigned int new_lig_comp = components_so_far - last_num_components + |
927 | MIN (this_comp, last_num_components); |
928 | _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp); |
929 | } else |
930 | break; |
931 | } |
932 | } |
933 | return_trace (true); |
934 | } |
935 | |
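/* Matches `count` backtrack values against already-output glyphs, walking
 * backwards with the context skipping iterator; on success *match_start is
 * the index of the furthest (earliest) matched glyph. */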
936 | static inline bool match_backtrack (hb_ot_apply_context_t *c, |
937 | unsigned int count, |
938 | const HBUINT16 backtrack[], |
939 | match_func_t match_func, |
940 | const void *match_data, |
941 | unsigned int *match_start) |
942 | { |
943 | TRACE_APPLY (nullptr); |
944 | |
945 | hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; |
946 | skippy_iter.reset (c->buffer->backtrack_len (), count); |
947 | skippy_iter.set_match_func (match_func, match_data, backtrack); |
948 | |
949 | for (unsigned int i = 0; i < count; i++) |
950 | if (!skippy_iter.prev ()) |
951 | return_trace (false); |
952 | |
953 | *match_start = skippy_iter.idx; |
954 | |
955 | return_trace (true); |
956 | } |
957 | |
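/* Matches `count` lookahead values against upcoming glyphs, walking
 * forward from just past the matched input (`offset`) with the context
 * skipping iterator; on success *end_index is one past the last matched
 * glyph. */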
958 | static inline bool match_lookahead (hb_ot_apply_context_t *c, |
959 | unsigned int count, |
960 | const HBUINT16 lookahead[], |
961 | match_func_t match_func, |
962 | const void *match_data, |
963 | unsigned int offset, |
964 | unsigned int *end_index) |
965 | { |
966 | TRACE_APPLY (nullptr); |
967 | |
968 | hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; |
969 | skippy_iter.reset (c->buffer->idx + offset - 1, count); |
970 | skippy_iter.set_match_func (match_func, match_data, lookahead); |
971 | |
972 | for (unsigned int i = 0; i < count; i++) |
973 | if (!skippy_iter.next ()) |
974 | return_trace (false); |
975 | |
976 | *end_index = skippy_iter.idx + 1; |
977 | |
978 | return_trace (true); |
979 | } |
980 | |
981 | |
982 | |
983 | struct LookupRecord |
984 | { |
985 | inline bool sanitize (hb_sanitize_context_t *c) const |
986 | { |
987 | TRACE_SANITIZE (this); |
988 | return_trace (c->check_struct (this)); |
989 | } |
990 | |
991 | HBUINT16 sequenceIndex; /* Index into current glyph |
992 | * sequence--first glyph = 0 */ |
993 | HBUINT16 lookupListIndex; /* Lookup to apply to that |
* position--zero-based */
995 | public: |
996 | DEFINE_SIZE_STATIC (4); |
997 | }; |
998 | |
999 | template <typename context_t> |
1000 | static inline void recurse_lookups (context_t *c, |
1001 | unsigned int lookupCount, |
1002 | const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */) |
1003 | { |
1004 | for (unsigned int i = 0; i < lookupCount; i++) |
1005 | c->recurse (lookupRecord[i].lookupListIndex); |
1006 | } |
1007 | |
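/* Applies the LookupRecords to the matched positions: for each record the
 * buffer is moved to the recorded position and the referenced lookup is
 * recursed into; the remaining match positions are then re-adjusted if the
 * recursed lookup grew or shrank the buffer. */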
1008 | static inline bool apply_lookup (hb_ot_apply_context_t *c, |
1009 | unsigned int count, /* Including the first glyph */ |
1010 | unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */ |
1011 | unsigned int lookupCount, |
1012 | const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ |
1013 | unsigned int match_length) |
1014 | { |
1015 | TRACE_APPLY (nullptr); |
1016 | |
1017 | hb_buffer_t *buffer = c->buffer; |
1018 | int end; |
1019 | |
/* All positions are distances from the beginning of the *output* buffer.
1021 | * Adjust. */ |
1022 | { |
1023 | unsigned int bl = buffer->backtrack_len (); |
1024 | end = bl + match_length; |
1025 | |
1026 | int delta = bl - buffer->idx; |
1027 | /* Convert positions to new indexing. */ |
1028 | for (unsigned int j = 0; j < count; j++) |
1029 | match_positions[j] += delta; |
1030 | } |
1031 | |
1032 | for (unsigned int i = 0; i < lookupCount && buffer->successful; i++) |
1033 | { |
1034 | unsigned int idx = lookupRecord[i].sequenceIndex; |
1035 | if (idx >= count) |
1036 | continue; |
1037 | |
/* Don't recurse to ourselves at the same position.
* Note that this test is too naive; it doesn't catch longer loops. */
1040 | if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index) |
1041 | continue; |
1042 | |
1043 | if (unlikely (!buffer->move_to (match_positions[idx]))) |
1044 | break; |
1045 | |
1046 | if (unlikely (buffer->max_ops <= 0)) |
1047 | break; |
1048 | |
1049 | unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len (); |
1050 | if (!c->recurse (lookupRecord[i].lookupListIndex)) |
1051 | continue; |
1052 | |
1053 | unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len (); |
1054 | int delta = new_len - orig_len; |
1055 | |
1056 | if (!delta) |
1057 | continue; |
1058 | |
1059 | /* Recursed lookup changed buffer len. Adjust. |
1060 | * |
1061 | * TODO: |
1062 | * |
1063 | * Right now, if buffer length increased by n, we assume n new glyphs |
1064 | * were added right after the current position, and if buffer length |
1065 | * was decreased by n, we assume n match positions after the current |
* one were removed. The former (buffer length increased) case is
1067 | * fine, but the decrease case can be improved in at least two ways, |
1068 | * both of which are significant: |
1069 | * |
1070 | * - If recursed-to lookup is MultipleSubst and buffer length |
* decreased, then it's the current match position that was deleted,
1072 | * NOT the one after it. |
1073 | * |
1074 | * - If buffer length was decreased by n, it does not necessarily |
* mean that n match positions were removed, as there might
1076 | * have been marks and default-ignorables in the sequence. We |
1077 | * should instead drop match positions between current-position |
* and current-position + n.
1079 | * |
1080 | * It should be possible to construct tests for both of these cases. |
1081 | */ |
1082 | |
1083 | end += delta; |
1084 | if (end <= int (match_positions[idx])) |
1085 | { |
1086 | /* End might end up being smaller than match_positions[idx] if the recursed |
* lookup ended up removing many items, more than we had matched.
1088 | * Just never rewind end back and get out of here. |
1089 | * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */ |
1090 | end = match_positions[idx]; |
1091 | /* There can't be any further changes. */ |
1092 | break; |
1093 | } |
1094 | |
1095 | unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */ |
1096 | |
1097 | if (delta > 0) |
1098 | { |
1099 | if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH)) |
1100 | break; |
1101 | } |
1102 | else |
1103 | { |
1104 | /* NOTE: delta is negative. */ |
1105 | delta = MAX (delta, (int) next - (int) count); |
1106 | next -= delta; |
1107 | } |
1108 | |
1109 | /* Shift! */ |
1110 | memmove (match_positions + next + delta, match_positions + next, |
1111 | (count - next) * sizeof (match_positions[0])); |
1112 | next += delta; |
1113 | count += delta; |
1114 | |
1115 | /* Fill in new entries. */ |
1116 | for (unsigned int j = idx + 1; j < next; j++) |
1117 | match_positions[j] = match_positions[j - 1] + 1; |
1118 | |
1119 | /* And fixup the rest. */ |
1120 | for (; next < count; next++) |
1121 | match_positions[next] += delta; |
1122 | } |
1123 | |
1124 | buffer->move_to (end); |
1125 | |
1126 | return_trace (true); |
1127 | } |
1128 | |
1129 | |
1130 | |
1131 | /* Contextual lookups */ |
1132 | |
1133 | struct ContextClosureLookupContext |
1134 | { |
1135 | ContextClosureFuncs funcs; |
1136 | const void *intersects_data; |
1137 | }; |
1138 | |
1139 | struct ContextCollectGlyphsLookupContext |
1140 | { |
1141 | ContextCollectGlyphsFuncs funcs; |
1142 | const void *collect_data; |
1143 | }; |
1144 | |
1145 | struct ContextApplyLookupContext |
1146 | { |
1147 | ContextApplyFuncs funcs; |
1148 | const void *match_data; |
1149 | }; |
1150 | |
1151 | static inline bool context_intersects (const hb_set_t *glyphs, |
1152 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1153 | const HBUINT16 input[], /* Array of input values--start with second glyph */ |
1154 | ContextClosureLookupContext &lookup_context) |
1155 | { |
1156 | return intersects_array (glyphs, |
1157 | inputCount ? inputCount - 1 : 0, input, |
1158 | lookup_context.funcs.intersects, lookup_context.intersects_data); |
1159 | } |
1160 | |
1161 | static inline void context_closure_lookup (hb_closure_context_t *c, |
1162 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1163 | const HBUINT16 input[], /* Array of input values--start with second glyph */ |
1164 | unsigned int lookupCount, |
1165 | const LookupRecord lookupRecord[], |
1166 | ContextClosureLookupContext &lookup_context) |
1167 | { |
1168 | if (context_intersects (c->glyphs, |
1169 | inputCount, input, |
1170 | lookup_context)) |
1171 | recurse_lookups (c, |
1172 | lookupCount, lookupRecord); |
1173 | } |
1174 | |
1175 | static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c, |
1176 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1177 | const HBUINT16 input[], /* Array of input values--start with second glyph */ |
1178 | unsigned int lookupCount, |
1179 | const LookupRecord lookupRecord[], |
1180 | ContextCollectGlyphsLookupContext &lookup_context) |
1181 | { |
1182 | collect_array (c, c->input, |
1183 | inputCount ? inputCount - 1 : 0, input, |
1184 | lookup_context.funcs.collect, lookup_context.collect_data); |
1185 | recurse_lookups (c, |
1186 | lookupCount, lookupRecord); |
1187 | } |
1188 | |
1189 | static inline bool context_would_apply_lookup (hb_would_apply_context_t *c, |
1190 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1191 | const HBUINT16 input[], /* Array of input values--start with second glyph */ |
1192 | unsigned int lookupCount HB_UNUSED, |
1193 | const LookupRecord lookupRecord[] HB_UNUSED, |
1194 | ContextApplyLookupContext &lookup_context) |
1195 | { |
1196 | return would_match_input (c, |
1197 | inputCount, input, |
1198 | lookup_context.funcs.match, lookup_context.match_data); |
1199 | } |
1200 | static inline bool context_apply_lookup (hb_ot_apply_context_t *c, |
1201 | unsigned int inputCount, /* Including the first glyph (not matched) */ |
1202 | const HBUINT16 input[], /* Array of input values--start with second glyph */ |
1203 | unsigned int lookupCount, |
1204 | const LookupRecord lookupRecord[], |
1205 | ContextApplyLookupContext &lookup_context) |
1206 | { |
1207 | unsigned int match_length = 0; |
1208 | unsigned int match_positions[HB_MAX_CONTEXT_LENGTH]; |
1209 | return match_input (c, |
1210 | inputCount, input, |
1211 | lookup_context.funcs.match, lookup_context.match_data, |
1212 | &match_length, match_positions) |
1213 | && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length), |
1214 | apply_lookup (c, |
1215 | inputCount, match_positions, |
1216 | lookupCount, lookupRecord, |
1217 | match_length)); |
1218 | } |
1219 | |
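/* A single context rule: the input sequence (minus the first glyph, which
 * is covered by the enclosing subtable) followed by the LookupRecords to
 * apply when the sequence matches. */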
1220 | struct Rule |
1221 | { |
1222 | inline bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const |
1223 | { |
1224 | return context_intersects (glyphs, |
1225 | inputCount, inputZ, |
1226 | lookup_context); |
1227 | } |
1228 | |
1229 | inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const |
1230 | { |
1231 | TRACE_CLOSURE (this); |
1232 | const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); |
1233 | context_closure_lookup (c, |
1234 | inputCount, inputZ, |
1235 | lookupCount, lookupRecord, |
1236 | lookup_context); |
1237 | } |
1238 | |
1239 | inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const |
1240 | { |
1241 | TRACE_COLLECT_GLYPHS (this); |
1242 | const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); |
1243 | context_collect_glyphs_lookup (c, |
1244 | inputCount, inputZ, |
1245 | lookupCount, lookupRecord, |
1246 | lookup_context); |
1247 | } |
1248 | |
1249 | inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const |
1250 | { |
1251 | TRACE_WOULD_APPLY (this); |
1252 | const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); |
1253 | return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context)); |
1254 | } |
1255 | |
1256 | inline bool apply (hb_ot_apply_context_t *c, ContextApplyLookupContext &lookup_context) const |
1257 | { |
1258 | TRACE_APPLY (this); |
1259 | const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); |
1260 | return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context)); |
1261 | } |
1262 | |
1263 | public: |
1264 | inline bool sanitize (hb_sanitize_context_t *c) const |
1265 | { |
1266 | TRACE_SANITIZE (this); |
1267 | return_trace (inputCount.sanitize (c) && |
1268 | lookupCount.sanitize (c) && |
1269 | c->check_range (inputZ, |
1270 | inputZ[0].static_size * inputCount + |
1271 | LookupRecord::static_size * lookupCount)); |
1272 | } |
1273 | |
1274 | protected: |
1275 | HBUINT16 inputCount; /* Total number of glyphs in input |
1276 | * glyph sequence--includes the first |
1277 | * glyph */ |
1278 | HBUINT16 lookupCount; /* Number of LookupRecords */ |
1279 | HBUINT16 inputZ[VAR]; /* Array of match inputs--start with |
1280 | * second glyph */ |
1281 | /*LookupRecord lookupRecordX[VAR];*/ /* Array of LookupRecords--in |
1282 | * design order */ |
1283 | public: |
1284 | DEFINE_SIZE_ARRAY (4, inputZ); |
1285 | }; |
1286 | |
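/* The set of Rules that share the same first glyph or class; rules are
 * tried in order and the first one that applies wins. */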
1287 | struct RuleSet |
1288 | { |
1289 | inline bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const |
1290 | { |
1291 | unsigned int num_rules = rule.len; |
1292 | for (unsigned int i = 0; i < num_rules; i++) |
1293 | if ((this+rule[i]).intersects (glyphs, lookup_context)) |
1294 | return true; |
1295 | return false; |
1296 | } |
1297 | |
1298 | inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const |
1299 | { |
1300 | TRACE_CLOSURE (this); |
1301 | unsigned int num_rules = rule.len; |
1302 | for (unsigned int i = 0; i < num_rules; i++) |
1303 | (this+rule[i]).closure (c, lookup_context); |
1304 | } |
1305 | |
1306 | inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const |
1307 | { |
1308 | TRACE_COLLECT_GLYPHS (this); |
1309 | unsigned int num_rules = rule.len; |
1310 | for (unsigned int i = 0; i < num_rules; i++) |
1311 | (this+rule[i]).collect_glyphs (c, lookup_context); |
1312 | } |
1313 | |
1314 | inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const |
1315 | { |
1316 | TRACE_WOULD_APPLY (this); |
1317 | unsigned int num_rules = rule.len; |
1318 | for (unsigned int i = 0; i < num_rules; i++) |
1319 | { |
1320 | if ((this+rule[i]).would_apply (c, lookup_context)) |
1321 | return_trace (true); |
1322 | } |
1323 | return_trace (false); |
1324 | } |
1325 | |
1326 | inline bool apply (hb_ot_apply_context_t *c, ContextApplyLookupContext &lookup_context) const |
1327 | { |
1328 | TRACE_APPLY (this); |
1329 | unsigned int num_rules = rule.len; |
1330 | for (unsigned int i = 0; i < num_rules; i++) |
1331 | { |
1332 | if ((this+rule[i]).apply (c, lookup_context)) |
1333 | return_trace (true); |
1334 | } |
1335 | return_trace (false); |
1336 | } |
1337 | |
1338 | inline bool sanitize (hb_sanitize_context_t *c) const |
1339 | { |
1340 | TRACE_SANITIZE (this); |
1341 | return_trace (rule.sanitize (c, this)); |
1342 | } |
1343 | |
1344 | protected: |
1345 | OffsetArrayOf<Rule> |
1346 | rule; /* Array of Rule tables |
1347 | * ordered by preference */ |
1348 | public: |
1349 | DEFINE_SIZE_ARRAY (2, rule); |
1350 | }; |
1351 | |
1352 | |
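/* Contextual lookup, format 1: rules are keyed by the first glyph through
 * a Coverage table, one RuleSet per covered glyph. */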
1353 | struct ContextFormat1 |
1354 | { |
1355 | inline bool intersects (const hb_set_t *glyphs) const |
1356 | { |
1357 | struct ContextClosureLookupContext lookup_context = { |
1358 | {intersects_glyph}, |
1359 | nullptr |
1360 | }; |
1361 | |
1362 | unsigned int count = ruleSet.len; |
1363 | for (hb_auto_t<Coverage::Iter> iter (this+coverage); iter.more (); iter.next ()) |
1364 | { |
1365 | if (unlikely (iter.get_coverage () >= count)) |
1366 | break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */ |
1367 | if (glyphs->has (iter.get_glyph ()) && |
1368 | (this+ruleSet[iter.get_coverage ()]).intersects (glyphs, lookup_context)) |
1369 | return true; |
1370 | } |
1371 | return false; |
1372 | } |
1373 | |
1374 | inline void closure (hb_closure_context_t *c) const |
1375 | { |
1376 | TRACE_CLOSURE (this); |
1377 | |
1378 | struct ContextClosureLookupContext lookup_context = { |
1379 | {intersects_glyph}, |
1380 | nullptr |
1381 | }; |
1382 | |
1383 | unsigned int count = ruleSet.len; |
1384 | for (hb_auto_t<Coverage::Iter> iter (this+coverage); iter.more (); iter.next ()) |
1385 | { |
1386 | if (unlikely (iter.get_coverage () >= count)) |
1387 | break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */ |
1388 | if (c->glyphs->has (iter.get_glyph ())) |
1389 | (this+ruleSet[iter.get_coverage ()]).closure (c, lookup_context); |
1390 | } |
1391 | } |
1392 | |
1393 | inline void collect_glyphs (hb_collect_glyphs_context_t *c) const |
1394 | { |
1395 | TRACE_COLLECT_GLYPHS (this); |
1396 | (this+coverage).add_coverage (c->input); |
1397 | |
1398 | struct ContextCollectGlyphsLookupContext lookup_context = { |
1399 | {collect_glyph}, |
1400 | nullptr |
1401 | }; |
1402 | |
1403 | unsigned int count = ruleSet.len; |
1404 | for (unsigned int i = 0; i < count; i++) |
1405 | (this+ruleSet[i]).collect_glyphs (c, lookup_context); |
1406 | } |
1407 | |
1408 | inline bool would_apply (hb_would_apply_context_t *c) const |
1409 | { |
1410 | TRACE_WOULD_APPLY (this); |
1411 | |
1412 | const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])]; |
1413 | struct ContextApplyLookupContext lookup_context = { |
1414 | {match_glyph}, |
1415 | nullptr |
1416 | }; |
1417 | return_trace (rule_set.would_apply (c, lookup_context)); |
1418 | } |
1419 | |
1420 | inline const Coverage &get_coverage (void) const |
1421 | { return this+coverage; } |
1422 | |
1423 | inline bool apply (hb_ot_apply_context_t *c) const |
1424 | { |
1425 | TRACE_APPLY (this); |
1426 | unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); |
1427 | if (likely (index == NOT_COVERED)) |
1428 | return_trace (false); |
1429 | |
1430 | const RuleSet &rule_set = this+ruleSet[index]; |
1431 | struct ContextApplyLookupContext lookup_context = { |
1432 | {match_glyph}, |
1433 | nullptr |
1434 | }; |
1435 | return_trace (rule_set.apply (c, lookup_context)); |
1436 | } |
1437 | |
1438 | inline bool subset (hb_subset_context_t *c) const |
1439 | { |
1440 | TRACE_SUBSET (this); |
1441 | // TODO(subset) |
1442 | return_trace (false); |
1443 | } |
1444 | |
1445 | inline bool sanitize (hb_sanitize_context_t *c) const |
1446 | { |
1447 | TRACE_SANITIZE (this); |
1448 | return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this)); |
1449 | } |
1450 | |
1451 | protected: |
1452 | HBUINT16 format; /* Format identifier--format = 1 */ |
1453 | OffsetTo<Coverage> |
1454 | coverage; /* Offset to Coverage table--from |
1455 | * beginning of table */ |
1456 | OffsetArrayOf<RuleSet> |
1457 | ruleSet; /* Array of RuleSet tables |
1458 | * ordered by Coverage Index */ |
1459 | public: |
1460 | DEFINE_SIZE_ARRAY (6, ruleSet); |
1461 | }; |
1462 | |
1463 | |
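/* Contextual lookup, format 2: rules are keyed by the class of the first
 * glyph, one RuleSet per class, with a Coverage table gating the first
 * glyph. */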
struct ContextFormat2
{
  inline bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverage).intersects (glyphs))
      return false;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (class_def.intersects_class (glyphs, i) &&
          (this+ruleSet[i]).intersects (glyphs, lookup_context))
        return true;

    return false;
  }

  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (class_def.intersects_class (c->glyphs, i)) {
        const RuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  { return this+coverage; }

  inline bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &class_def = this+classDef;
    index = class_def.get_class (c->buffer->cur().codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;         /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of table */
  OffsetTo<ClassDef>
                classDef;       /* Offset to glyph ClassDef table--from
                                 * beginning of table */
  OffsetArrayOf<RuleSet>
                ruleSet;        /* Array of RuleSet tables
                                 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};


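/* Contextual lookup, format 3: a single rule; each position of the input
 * sequence has its own Coverage table, stored inline after the header. */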
struct ContextFormat3
{
  inline bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverageZ[0]).intersects (glyphs))
      return false;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      this
    };
    return context_intersects (glyphs,
                               glyphCount, (const HBUINT16 *) (coverageZ + 1),
                               lookup_context);
  }

  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverageZ[0]).intersects (c->glyphs))
      return;

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      this
    };
    context_closure_lookup (c,
                            glyphCount, (const HBUINT16 *) (coverageZ + 1),
                            lookupCount, lookupRecord,
                            lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverageZ[0]).add_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
                                   glyphCount, (const HBUINT16 *) (coverageZ + 1),
                                   lookupCount, lookupRecord,
                                   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_would_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  { return this+coverageZ[0]; }

  inline bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    unsigned int count = glyphCount;
    if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return_trace (false);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
    return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 3 */
  HBUINT16      glyphCount;             /* Number of glyphs in the input glyph
                                         * sequence */
  HBUINT16      lookupCount;            /* Number of LookupRecords */
  OffsetTo<Coverage>
                coverageZ[VAR];         /* Array of offsets to Coverage
                                         * table in glyph sequence order */
  /*LookupRecord lookupRecordX[VAR];*/  /* Array of LookupRecords--in
                                         * design order */
  public:
  DEFINE_SIZE_ARRAY (6, coverageZ);
};

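/* Contextual lookup subtable: thin wrapper that switches on the leading
 * format field and forwards to one of the three formats above. */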
struct Context
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1));
    case 2: return_trace (c->dispatch (u.format2));
    case 3: return_trace (c->dispatch (u.format3));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  ContextFormat1        format1;
  ContextFormat2        format2;
  ContextFormat3        format3;
  } u;
};


/* Chaining Contextual lookups */

struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data[3];
};

struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};

struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};

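/* The helpers below are shared by the ChainContext formats.  A chaining rule
 * has three parts: a backtrack sequence (matched before the current glyph,
 * stored in reverse order), an input sequence (the current glyph plus
 * inputCount-1 following glyphs), and a lookahead sequence (matched after
 * the input).  Lookup records are applied to positions of the input part
 * only. */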
static inline bool chain_context_intersects (const hb_set_t *glyphs,
                                             unsigned int backtrackCount,
                                             const HBUINT16 backtrack[],
                                             unsigned int inputCount, /* Including the first glyph (not matched) */
                                             const HBUINT16 input[], /* Array of input values--start with second glyph */
                                             unsigned int lookaheadCount,
                                             const HBUINT16 lookahead[],
                                             ChainContextClosureLookupContext &lookup_context)
{
  return intersects_array (glyphs,
                           backtrackCount, backtrack,
                           lookup_context.funcs.intersects, lookup_context.intersects_data[0])
      && intersects_array (glyphs,
                           inputCount ? inputCount - 1 : 0, input,
                           lookup_context.funcs.intersects, lookup_context.intersects_data[1])
      && intersects_array (glyphs,
                           lookaheadCount, lookahead,
                           lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
}

static inline void chain_context_closure_lookup (hb_closure_context_t *c,
                                                 unsigned int backtrackCount,
                                                 const HBUINT16 backtrack[],
                                                 unsigned int inputCount, /* Including the first glyph (not matched) */
                                                 const HBUINT16 input[], /* Array of input values--start with second glyph */
                                                 unsigned int lookaheadCount,
                                                 const HBUINT16 lookahead[],
                                                 unsigned int lookupCount,
                                                 const LookupRecord lookupRecord[],
                                                 ChainContextClosureLookupContext &lookup_context)
{
  if (chain_context_intersects (c->glyphs,
                                backtrackCount, backtrack,
                                inputCount, input,
                                lookaheadCount, lookahead,
                                lookup_context))
    recurse_lookups (c,
                     lookupCount, lookupRecord);
}

static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
                                                        unsigned int backtrackCount,
                                                        const HBUINT16 backtrack[],
                                                        unsigned int inputCount, /* Including the first glyph (not matched) */
                                                        const HBUINT16 input[], /* Array of input values--start with second glyph */
                                                        unsigned int lookaheadCount,
                                                        const HBUINT16 lookahead[],
                                                        unsigned int lookupCount,
                                                        const LookupRecord lookupRecord[],
                                                        ChainContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->before,
                 backtrackCount, backtrack,
                 lookup_context.funcs.collect, lookup_context.collect_data[0]);
  collect_array (c, c->input,
                 inputCount ? inputCount - 1 : 0, input,
                 lookup_context.funcs.collect, lookup_context.collect_data[1]);
  collect_array (c, c->after,
                 lookaheadCount, lookahead,
                 lookup_context.funcs.collect, lookup_context.collect_data[2]);
  recurse_lookups (c,
                   lookupCount, lookupRecord);
}

static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
                                                     unsigned int backtrackCount,
                                                     const HBUINT16 backtrack[] HB_UNUSED,
                                                     unsigned int inputCount, /* Including the first glyph (not matched) */
                                                     const HBUINT16 input[], /* Array of input values--start with second glyph */
                                                     unsigned int lookaheadCount,
                                                     const HBUINT16 lookahead[] HB_UNUSED,
                                                     unsigned int lookupCount HB_UNUSED,
                                                     const LookupRecord lookupRecord[] HB_UNUSED,
                                                     ChainContextApplyLookupContext &lookup_context)
{
  return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
      && would_match_input (c,
                            inputCount, input,
                            lookup_context.funcs.match, lookup_context.match_data[1]);
}

static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
                                               unsigned int backtrackCount,
                                               const HBUINT16 backtrack[],
                                               unsigned int inputCount, /* Including the first glyph (not matched) */
                                               const HBUINT16 input[], /* Array of input values--start with second glyph */
                                               unsigned int lookaheadCount,
                                               const HBUINT16 lookahead[],
                                               unsigned int lookupCount,
                                               const LookupRecord lookupRecord[],
                                               ChainContextApplyLookupContext &lookup_context)
{
  unsigned int start_index = 0, match_length = 0, end_index = 0;
  unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
  return match_input (c,
                      inputCount, input,
                      lookup_context.funcs.match, lookup_context.match_data[1],
                      &match_length, match_positions)
      && match_backtrack (c,
                          backtrackCount, backtrack,
                          lookup_context.funcs.match, lookup_context.match_data[0],
                          &start_index)
      && match_lookahead (c,
                          lookaheadCount, lookahead,
                          lookup_context.funcs.match, lookup_context.match_data[2],
                          match_length, &end_index)
      && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index),
          apply_lookup (c,
                        inputCount, match_positions,
                        lookupCount, lookupRecord,
                        match_length));
}

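/* A single chaining rule.  Only the backtrack array is at a fixed offset;
 * the input, lookahead and lookup-record arrays are laid out back to back
 * and are located with StructAfter<> at run time. */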
struct ChainRule
{
  inline bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
  {
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
    const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
    return chain_context_intersects (glyphs,
                                     backtrack.len, backtrack.arrayZ,
                                     input.len, input.arrayZ,
                                     lookahead.len, lookahead.arrayZ,
                                     lookup_context);
  }

  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
    const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
                                  backtrack.len, backtrack.arrayZ,
                                  input.len, input.arrayZ,
                                  lookahead.len, lookahead.arrayZ,
                                  lookup.len, lookup.arrayZ,
                                  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
    const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, backtrack.arrayZ,
                                         input.len, input.arrayZ,
                                         lookahead.len, lookahead.arrayZ,
                                         lookup.len, lookup.arrayZ,
                                         lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
    const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_would_apply_lookup (c,
                                                    backtrack.len, backtrack.arrayZ,
                                                    input.len, input.arrayZ,
                                                    lookahead.len, lookahead.arrayZ, lookup.len,
                                                    lookup.arrayZ, lookup_context));
  }

  inline bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
    const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_apply_lookup (c,
                                              backtrack.len, backtrack.arrayZ,
                                              input.len, input.arrayZ,
                                              lookahead.len, lookahead.arrayZ, lookup.len,
                                              lookup.arrayZ, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c)) return_trace (false);
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
    if (!input.sanitize (c)) return_trace (false);
    const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
    if (!lookahead.sanitize (c)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  ArrayOf<HBUINT16>
                backtrack;      /* Array of backtracking values
                                 * (to be matched before the input
                                 * sequence) */
  HeadlessArrayOf<HBUINT16>
                inputX;         /* Array of input values (start with
                                 * second glyph) */
  ArrayOf<HBUINT16>
                lookaheadX;     /* Array of lookahead values (to be
                                 * matched after the input sequence) */
  ArrayOf<LookupRecord>
                lookupX;        /* Array of LookupRecords--in
                                 * design order */
  public:
  DEFINE_SIZE_MIN (8);
};

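/* A set of ChainRules sharing the same first-glyph key; the rules are tried
 * in order and the first one that applies wins. */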
struct ChainRuleSet
{
  inline bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
  {
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).intersects (glyphs, lookup_context))
        return true;
    return false;
  }
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).would_apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  inline bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<ChainRule>
                rule;           /* Array of ChainRule tables
                                 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

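/* Chaining contextual lookup, format 1: sequences are matched by individual
 * glyph ids; one ChainRuleSet per glyph of the Coverage table. */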
struct ChainContextFormat1
{
  inline bool intersects (const hb_set_t *glyphs) const
  {
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      {nullptr, nullptr, nullptr}
    };

    unsigned int count = ruleSet.len;
    for (hb_auto_t<Coverage::Iter> iter (this+coverage); iter.more (); iter.next ())
    {
      if (unlikely (iter.get_coverage () >= count))
        break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
      if (glyphs->has (iter.get_glyph ()) &&
          (this+ruleSet[iter.get_coverage ()]).intersects (glyphs, lookup_context))
        return true;
    }
    return false;
  }

  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      {nullptr, nullptr, nullptr}
    };

    unsigned int count = ruleSet.len;
    for (hb_auto_t<Coverage::Iter> iter (this+coverage); iter.more (); iter.next ())
    {
      if (unlikely (iter.get_coverage () >= count))
        break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
      if (c->glyphs->has (iter.get_glyph ()))
        (this+ruleSet[iter.get_coverage ()]).closure (c, lookup_context);
    }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {nullptr, nullptr, nullptr}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {nullptr, nullptr, nullptr}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  { return this+coverage; }

  inline bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {nullptr, nullptr, nullptr}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;         /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
                ruleSet;        /* Array of ChainRuleSet tables
                                 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

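/* Chaining contextual lookup, format 2: backtrack, input and lookahead
 * sequences are matched by glyph class, each against its own ClassDef. */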
struct ChainContextFormat2
{
  inline bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverage).intersects (glyphs))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (glyphs, i) &&
          (this+ruleSet[i]).intersects (glyphs, lookup_context))
        return true;

    return false;
  }
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (c->glyphs, i)) {
        const ChainRuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    unsigned int index = input_class_def.get_class (c->glyphs[0]);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  { return this+coverage; }

  inline bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    index = input_class_def.get_class (c->buffer->cur().codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) &&
                  backtrackClassDef.sanitize (c, this) &&
                  inputClassDef.sanitize (c, this) &&
                  lookaheadClassDef.sanitize (c, this) &&
                  ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of table */
  OffsetTo<ClassDef>
                backtrackClassDef;      /* Offset to glyph ClassDef table
                                         * containing backtrack sequence
                                         * data--from beginning of table */
  OffsetTo<ClassDef>
                inputClassDef;          /* Offset to glyph ClassDef
                                         * table containing input sequence
                                         * data--from beginning of table */
  OffsetTo<ClassDef>
                lookaheadClassDef;      /* Offset to glyph ClassDef table
                                         * containing lookahead sequence
                                         * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
                ruleSet;                /* Array of ChainRuleSet tables
                                         * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

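/* Chaining contextual lookup, format 3: a single rule in which every
 * position of the backtrack, input and lookahead sequences has its own
 * Coverage table. */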
struct ChainContextFormat3
{
  inline bool intersects (const hb_set_t *glyphs) const
  {
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    if (!(this+input[0]).intersects (glyphs))
      return false;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      {this, this, this}
    };
    return chain_context_intersects (glyphs,
                                     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                     input.len, (const HBUINT16 *) input.arrayZ + 1,
                                     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                     lookup_context);
  }

  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      {this, this, this}
    };
    chain_context_closure_lookup (c,
                                  backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                  input.len, (const HBUINT16 *) input.arrayZ + 1,
                                  lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                  lookup.len, lookup.arrayZ,
                                  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    (this+input[0]).add_coverage (c->input);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                         input.len, (const HBUINT16 *) input.arrayZ + 1,
                                         lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                         lookup.len, lookup.arrayZ,
                                         lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_would_apply_lookup (c,
                                                    backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                                    input.len, (const HBUINT16 *) input.arrayZ + 1,
                                                    lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                                    lookup.len, lookup.arrayZ, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    return this+input[0];
  }

  inline bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_apply_lookup (c,
                                              backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                              input.len, (const HBUINT16 *) input.arrayZ + 1,
                                              lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                              lookup.len, lookup.arrayZ, lookup_context));
  }

  inline bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c, this)) return_trace (false);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return_trace (false);
    if (!input.len) return_trace (false); /* To be consistent with Context. */
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  HBUINT16      format;         /* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
                backtrack;      /* Array of coverage tables
                                 * in backtracking sequence, in glyph
                                 * sequence order */
  OffsetArrayOf<Coverage>
                inputX;         /* Array of coverage
                                 * tables in input sequence, in glyph
                                 * sequence order */
  OffsetArrayOf<Coverage>
                lookaheadX;     /* Array of coverage tables
                                 * in lookahead sequence, in glyph
                                 * sequence order */
  ArrayOf<LookupRecord>
                lookupX;        /* Array of LookupRecords--in
                                 * design order */
  public:
  DEFINE_SIZE_MIN (10);
};

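/* Chaining contextual lookup subtable: dispatches on the format field to
 * one of the three formats above. */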
struct ChainContext
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1));
    case 2: return_trace (c->dispatch (u.format2));
    case 3: return_trace (c->dispatch (u.format3));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  ChainContextFormat1   format1;
  ChainContextFormat2   format2;
  ChainContextFormat3   format3;
  } u;
};


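/* Extension lookup, format 1: wraps a subtable of another lookup type behind
 * a 32-bit offset, letting lookups address data that plain 16-bit offsets
 * cannot reach. */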
template <typename T>
struct ExtensionFormat1
{
  inline unsigned int get_type (void) const { return extensionLookupType; }

  template <typename X>
  inline const X& get_subtable (void) const
  {
    unsigned int offset = extensionOffset;
    if (unlikely (!offset)) return Null(typename T::SubTable);
    return StructAtOffset<typename T::SubTable> (this, offset);
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, format);
    if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
    return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type ()));
  }

  /* This is called from may_dispatch() above with hb_sanitize_context_t. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  extensionOffset != 0 &&
                  extensionLookupType != T::SubTable::Extension);
  }

  protected:
  HBUINT16      format;                 /* Format identifier. Set to 1. */
  HBUINT16      extensionLookupType;    /* Lookup type of subtable referenced
                                         * by ExtensionOffset (i.e. the
                                         * extension subtable). */
  HBUINT32      extensionOffset;        /* Offset to the extension subtable,
                                         * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};

template <typename T>
struct Extension
{
  inline unsigned int get_type (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  template <typename X>
  inline const X& get_subtable (void) const
  {
    switch (u.format) {
    case 1: return u.format1.template get_subtable<typename T::SubTable> ();
    default:return Null(typename T::SubTable);
    }
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (u.format1.dispatch (c));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  ExtensionFormat1<T>   format1;
  } u;
};


/*
 * GSUB/GPOS Common
 */

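/* Shared header of the GSUB and GPOS tables: a script list, a feature list,
 * a lookup list, and (from version 1.1 on) an optional feature-variations
 * table. */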
struct GSUBGPOS
{
  inline bool has_data (void) const { return version.to_int () != 0; }
  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
                                       unsigned int *script_count /* IN/OUT */,
                                       hb_tag_t *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  inline hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
                                        unsigned int *feature_count /* IN/OUT */,
                                        hb_tag_t *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  inline bool find_variations_index (const int *coords, unsigned int num_coords,
                                     unsigned int *index) const
  { return (version.to_int () >= 0x00010001u ? this+featureVars : Null(FeatureVariations))
           .find_index (coords, num_coords, index); }
  inline const Feature& get_feature_variation (unsigned int feature_index,
                                               unsigned int variations_index) const
  {
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
        version.to_int () >= 0x00010001u)
    {
      const Feature *feature = (this+featureVars).find_substitute (variations_index,
                                                                   feature_index);
      if (feature)
        return *feature;
    }
    return get_feature (feature_index);
  }

  template <typename TLookup>
  inline bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    struct GSUBGPOS *out = c->serializer->embed (*this);
    if (unlikely (!out)) return_trace (false);
    out->scriptList.serialize_subset (c, this+scriptList, out);
    out->featureList.serialize_subset (c, this+featureList, out);

    typedef OffsetListOf<TLookup> TLookupList;
    /* TODO Use intersects() to count how many subtables survive? */
    CastR<OffsetTo<TLookupList> > (out->lookupList)
      .serialize_subset (c,
                         this+CastR<const OffsetTo<TLookupList> > (lookupList),
                         out);

    if (version.to_int () >= 0x00010001u)
      out->featureVars.serialize_subset (c, this+featureVars, out);
    return_trace (true);
  }

  inline unsigned int get_size (void) const
  {
    return min_size +
           (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
  }

  template <typename TLookup>
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    typedef OffsetListOf<TLookup> TLookupList;
    return_trace (version.sanitize (c) &&
                  likely (version.major == 1) &&
                  scriptList.sanitize (c, this) &&
                  featureList.sanitize (c, this) &&
                  CastR<OffsetTo<TLookupList> > (lookupList).sanitize (c, this) &&
                  (version.to_int () < 0x00010001u || featureVars.sanitize (c, this)));
  }

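  /* Per-face accelerator: keeps a reference to the sanitized table blob and
   * one lookup accelerator per lookup in the table. */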
  template <typename T>
  struct accelerator_t
  {
    inline void init (hb_face_t *face)
    {
      this->blob = hb_sanitize_context_t().reference_table<T> (face);
      table = this->blob->template as<T> ();

      this->lookup_count = table->get_lookup_count ();

      this->accels = (hb_ot_layout_lookup_accelerator_t *) calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
      if (unlikely (!this->accels))
        this->lookup_count = 0;

      for (unsigned int i = 0; i < this->lookup_count; i++)
        this->accels[i].init (table->get_lookup (i));
    }

    inline void fini (void)
    {
      for (unsigned int i = 0; i < this->lookup_count; i++)
        this->accels[i].fini ();
      free (this->accels);
      hb_blob_destroy (this->blob);
    }

    hb_blob_t *blob;
    const T *table;
    unsigned int lookup_count;
    hb_ot_layout_lookup_accelerator_t *accels;
  };

  protected:
  FixedVersion<>        version;        /* Version of the GSUB/GPOS table--initially set
                                         * to 0x00010000u */
  OffsetTo<ScriptList>
                scriptList;     /* ScriptList table */
  OffsetTo<FeatureList>
                featureList;    /* FeatureList table */
  OffsetTo<LookupList>
                lookupList;     /* LookupList table */
  LOffsetTo<FeatureVariations>
                featureVars;    /* Offset to Feature Variations
                                 * table--from beginning of table
                                 * (may be NULL). Introduced
                                 * in version 0x00010001. */
  public:
  DEFINE_SIZE_MIN (10);
};
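
/* Concrete tables derive from GSUBGPOS and plug in their own lookup and
 * subtable types.  A minimal sketch, for illustration only (the real
 * definitions live in hb-ot-layout-gsub-table.hh and
 * hb-ot-layout-gpos-table.hh and differ in detail):
 *
 *   struct GSUB : GSUBGPOS
 *   {
 *     static const hb_tag_t tableTag = HB_OT_TAG_GSUB;
 *     inline const SubstLookup& get_lookup (unsigned int i) const
 *     { return CastR<SubstLookup> (GSUBGPOS::get_lookup (i)); }
 *   };
 */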


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUBGPOS_HH */