1/*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
4 *
5 * This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29#ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30#define HB_OT_LAYOUT_GSUBGPOS_HH
31
32#include "hb.hh"
33#include "hb-buffer.hh"
34#include "hb-map.hh"
35#include "hb-set.hh"
36#include "hb-ot-map.hh"
37#include "hb-ot-layout-common.hh"
38#include "hb-ot-layout-gdef-table.hh"
39
40
41namespace OT {
42
43
44struct hb_intersects_context_t :
45 hb_dispatch_context_t<hb_intersects_context_t, bool>
46{
47 template <typename T>
48 return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
49 static return_t default_return_value () { return false; }
50 bool stop_sublookup_iteration (return_t r) const { return r; }
51
52 const hb_set_t *glyphs;
53
54 hb_intersects_context_t (const hb_set_t *glyphs_) :
55 glyphs (glyphs_) {}
56};
57
58struct hb_closure_context_t :
59 hb_dispatch_context_t<hb_closure_context_t>
60{
61 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
62 template <typename T>
63 return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
64 static return_t default_return_value () { return hb_empty_t (); }
65 void recurse (unsigned int lookup_index)
66 {
67 if (unlikely (nesting_level_left == 0 || !recurse_func))
68 return;
69
70 nesting_level_left--;
71 recurse_func (this, lookup_index);
72 nesting_level_left++;
73 }
74
75 bool lookup_limit_exceeded ()
76 { return lookup_count > HB_MAX_LOOKUP_INDICES; }
77
78 bool should_visit_lookup (unsigned int lookup_index)
79 {
80 if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
81 return false;
82
83 if (is_lookup_done (lookup_index))
84 return false;
85
86 done_lookups->set (lookup_index, glyphs->get_population ());
87 return true;
88 }
89
90 bool is_lookup_done (unsigned int lookup_index)
91 {
92 /* Have we visited this lookup with the current set of glyphs? */
93 return done_lookups->get (lookup_index) == glyphs->get_population ();
94 }
95
96 hb_face_t *face;
97 hb_set_t *glyphs;
98 hb_set_t output[1];
99 recurse_func_t recurse_func;
100 unsigned int nesting_level_left;
101
102 hb_closure_context_t (hb_face_t *face_,
103 hb_set_t *glyphs_,
104 hb_map_t *done_lookups_,
105 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
106 face (face_),
107 glyphs (glyphs_),
108 recurse_func (nullptr),
109 nesting_level_left (nesting_level_left_),
110 done_lookups (done_lookups_),
111 lookup_count (0)
112 {}
113
114 ~hb_closure_context_t () { flush (); }
115
116 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
117
118 void flush ()
119 {
120 hb_set_del_range (output, face->get_num_glyphs (), hb_set_get_max (output)); /* Remove invalid glyphs. */
121 hb_set_union (glyphs, output);
122 hb_set_clear (output);
123 }
124
125 private:
126 hb_map_t *done_lookups;
127 unsigned int lookup_count;
128};
129
130struct hb_closure_lookups_context_t :
131 hb_dispatch_context_t<hb_closure_lookups_context_t>
132{
133 typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
134 template <typename T>
135 return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
136 static return_t default_return_value () { return hb_empty_t (); }
137 void recurse (unsigned lookup_index)
138 {
139 if (unlikely (nesting_level_left == 0 || !recurse_func))
140 return;
141
    /* Return if this lookup was already recursed into. */
143 if (is_lookup_visited (lookup_index))
144 return;
145
146 set_lookup_visited (lookup_index);
147 nesting_level_left--;
148 recurse_func (this, lookup_index);
149 nesting_level_left++;
150 }
151
152 void set_lookup_visited (unsigned lookup_index)
153 { visited_lookups->add (lookup_index); }
154
155 void set_lookup_inactive (unsigned lookup_index)
156 { inactive_lookups->add (lookup_index); }
157
158 bool lookup_limit_exceeded ()
159 { return lookup_count > HB_MAX_LOOKUP_INDICES; }
160
161 bool is_lookup_visited (unsigned lookup_index)
162 {
163 if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
164 return true;
165
166 return visited_lookups->has (lookup_index);
167 }
168
169 hb_face_t *face;
170 const hb_set_t *glyphs;
171 recurse_func_t recurse_func;
172 unsigned int nesting_level_left;
173
174 hb_closure_lookups_context_t (hb_face_t *face_,
175 const hb_set_t *glyphs_,
176 hb_set_t *visited_lookups_,
177 hb_set_t *inactive_lookups_,
178 unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
179 face (face_),
180 glyphs (glyphs_),
181 recurse_func (nullptr),
182 nesting_level_left (nesting_level_left_),
183 visited_lookups (visited_lookups_),
184 inactive_lookups (inactive_lookups_),
185 lookup_count (0) {}
186
187 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
188
189 private:
190 hb_set_t *visited_lookups;
191 hb_set_t *inactive_lookups;
192 unsigned int lookup_count;
193};
194
195struct hb_would_apply_context_t :
196 hb_dispatch_context_t<hb_would_apply_context_t, bool>
197{
198 template <typename T>
199 return_t dispatch (const T &obj) { return obj.would_apply (this); }
200 static return_t default_return_value () { return false; }
201 bool stop_sublookup_iteration (return_t r) const { return r; }
202
203 hb_face_t *face;
204 const hb_codepoint_t *glyphs;
205 unsigned int len;
206 bool zero_context;
207
208 hb_would_apply_context_t (hb_face_t *face_,
209 const hb_codepoint_t *glyphs_,
210 unsigned int len_,
211 bool zero_context_) :
212 face (face_),
213 glyphs (glyphs_),
214 len (len_),
215 zero_context (zero_context_) {}
216};
217
218struct hb_collect_glyphs_context_t :
219 hb_dispatch_context_t<hb_collect_glyphs_context_t>
220{
221 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
222 template <typename T>
223 return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
224 static return_t default_return_value () { return hb_empty_t (); }
225 void recurse (unsigned int lookup_index)
226 {
227 if (unlikely (nesting_level_left == 0 || !recurse_func))
228 return;
229
230 /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
231 * past the previous check. For GSUB, we only want to collect the output
232 * glyphs in the recursion. If output is not requested, we can go home now.
233 *
     * Note further that the above is not exactly correct. A recursed lookup
235 * is allowed to match input that is not matched in the context, but that's
236 * not how most fonts are built. It's possible to relax that and recurse
237 * with all sets here if it proves to be an issue.
238 */
239
240 if (output == hb_set_get_empty ())
241 return;
242
    /* Return if this lookup was already recursed into. */
244 if (recursed_lookups->has (lookup_index))
245 return;
246
247 hb_set_t *old_before = before;
248 hb_set_t *old_input = input;
249 hb_set_t *old_after = after;
250 before = input = after = hb_set_get_empty ();
251
252 nesting_level_left--;
253 recurse_func (this, lookup_index);
254 nesting_level_left++;
255
256 before = old_before;
257 input = old_input;
258 after = old_after;
259
260 recursed_lookups->add (lookup_index);
261 }
262
263 hb_face_t *face;
264 hb_set_t *before;
265 hb_set_t *input;
266 hb_set_t *after;
267 hb_set_t *output;
268 recurse_func_t recurse_func;
269 hb_set_t *recursed_lookups;
270 unsigned int nesting_level_left;
271
272 hb_collect_glyphs_context_t (hb_face_t *face_,
273 hb_set_t *glyphs_before, /* OUT. May be NULL */
274 hb_set_t *glyphs_input, /* OUT. May be NULL */
275 hb_set_t *glyphs_after, /* OUT. May be NULL */
276 hb_set_t *glyphs_output, /* OUT. May be NULL */
277 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
278 face (face_),
279 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
280 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
281 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
282 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
283 recurse_func (nullptr),
284 recursed_lookups (hb_set_create ()),
285 nesting_level_left (nesting_level_left_) {}
286 ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }
287
288 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
289};
290
291
292
293template <typename set_t>
294struct hb_collect_coverage_context_t :
295 hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
296{
297 typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
298 template <typename T>
299 return_t dispatch (const T &obj) { return obj.get_coverage (); }
300 static return_t default_return_value () { return Null (Coverage); }
301 bool stop_sublookup_iteration (return_t r) const
302 {
303 r.collect_coverage (set);
304 return false;
305 }
306
307 hb_collect_coverage_context_t (set_t *set_) :
308 set (set_) {}
309
310 set_t *set;
311};
312
313
314struct hb_ot_apply_context_t :
315 hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
316{
317 struct matcher_t
318 {
319 matcher_t () :
320 lookup_props (0),
321 ignore_zwnj (false),
322 ignore_zwj (false),
323 mask (-1),
324#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
325 syllable arg1(0),
326#undef arg1
327 match_func (nullptr),
328 match_data (nullptr) {}
329
330 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
331
332 void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
333 void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
334 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
335 void set_mask (hb_mask_t mask_) { mask = mask_; }
336 void set_syllable (uint8_t syllable_) { syllable = syllable_; }
337 void set_match_func (match_func_t match_func_,
338 const void *match_data_)
339 { match_func = match_func_; match_data = match_data_; }
340
341 enum may_match_t {
342 MATCH_NO,
343 MATCH_YES,
344 MATCH_MAYBE
345 };
346
347 may_match_t may_match (const hb_glyph_info_t &info,
348 const HBUINT16 *glyph_data) const
349 {
350 if (!(info.mask & mask) ||
351 (syllable && syllable != info.syllable ()))
352 return MATCH_NO;
353
354 if (match_func)
355 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
356
357 return MATCH_MAYBE;
358 }
359
360 enum may_skip_t {
361 SKIP_NO,
362 SKIP_YES,
363 SKIP_MAYBE
364 };
365
366 may_skip_t may_skip (const hb_ot_apply_context_t *c,
367 const hb_glyph_info_t &info) const
368 {
369 if (!c->check_glyph_property (&info, lookup_props))
370 return SKIP_YES;
371
372 if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
373 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
374 (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
375 return SKIP_MAYBE;
376
377 return SKIP_NO;
378 }
379
380 protected:
381 unsigned int lookup_props;
382 bool ignore_zwnj;
383 bool ignore_zwj;
384 hb_mask_t mask;
385 uint8_t syllable;
386 match_func_t match_func;
387 const void *match_data;
388 };
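  /* How the two tri-states above combine (as read off skipping_iterator_t::next/prev
   * below): a glyph is accepted when may_match is MATCH_YES, or when it is
   * MATCH_MAYBE while may_skip is SKIP_NO; SKIP_YES glyphs are passed over
   * unconditionally; any other combination stops the iteration. */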
389
390 struct skipping_iterator_t
391 {
392 void init (hb_ot_apply_context_t *c_, bool context_match = false)
393 {
394 c = c_;
395 match_glyph_data = nullptr;
396 matcher.set_match_func (nullptr, nullptr);
397 matcher.set_lookup_props (c->lookup_props);
398 /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
399 matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
400 /* Ignore ZWJ if we are matching context, or asked to. */
401 matcher.set_ignore_zwj (context_match || c->auto_zwj);
402 matcher.set_mask (context_match ? -1 : c->lookup_mask);
403 }
404 void set_lookup_props (unsigned int lookup_props)
405 {
406 matcher.set_lookup_props (lookup_props);
407 }
408 void set_match_func (matcher_t::match_func_t match_func_,
409 const void *match_data_,
410 const HBUINT16 glyph_data[])
411 {
412 matcher.set_match_func (match_func_, match_data_);
413 match_glyph_data = glyph_data;
414 }
415
416 void reset (unsigned int start_index_,
417 unsigned int num_items_)
418 {
419 idx = start_index_;
420 num_items = num_items_;
421 end = c->buffer->len;
422 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
423 }
424
425 void reject ()
426 {
427 num_items++;
428 if (match_glyph_data) match_glyph_data--;
429 }
430
431 matcher_t::may_skip_t
432 may_skip (const hb_glyph_info_t &info) const
433 { return matcher.may_skip (c, info); }
434
435 bool next ()
436 {
437 assert (num_items > 0);
438 while (idx + num_items < end)
439 {
440 idx++;
441 const hb_glyph_info_t &info = c->buffer->info[idx];
442
443 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
444 if (unlikely (skip == matcher_t::SKIP_YES))
445 continue;
446
447 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
448 if (match == matcher_t::MATCH_YES ||
449 (match == matcher_t::MATCH_MAYBE &&
450 skip == matcher_t::SKIP_NO))
451 {
452 num_items--;
453 if (match_glyph_data) match_glyph_data++;
454 return true;
455 }
456
457 if (skip == matcher_t::SKIP_NO)
458 return false;
459 }
460 return false;
461 }
462 bool prev ()
463 {
464 assert (num_items > 0);
465 while (idx > num_items - 1)
466 {
467 idx--;
468 const hb_glyph_info_t &info = c->buffer->out_info[idx];
469
470 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
471 if (unlikely (skip == matcher_t::SKIP_YES))
472 continue;
473
474 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
475 if (match == matcher_t::MATCH_YES ||
476 (match == matcher_t::MATCH_MAYBE &&
477 skip == matcher_t::SKIP_NO))
478 {
479 num_items--;
480 if (match_glyph_data) match_glyph_data++;
481 return true;
482 }
483
484 if (skip == matcher_t::SKIP_NO)
485 return false;
486 }
487 return false;
488 }
489
490 unsigned int idx;
491 protected:
492 hb_ot_apply_context_t *c;
493 matcher_t matcher;
494 const HBUINT16 *match_glyph_data;
495
496 unsigned int num_items;
497 unsigned int end;
498 };
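  /* A minimal sketch of driving the iterator (illustrative only; it mirrors what
   * match_input () further below does, and the local names are assumptions):
   *
   *   hb_ot_apply_context_t::skipping_iterator_t &it = c->iter_input;
   *   it.reset (c->buffer->idx, count - 1);
   *   it.set_match_func (match_glyph, nullptr, values);
   *   for (unsigned int i = 1; i < count; i++)
   *     if (!it.next ()) return false; // it.idx is the position of the i-th match
   */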
499
500
501 const char *get_name () { return "APPLY"; }
502 typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
503 template <typename T>
504 return_t dispatch (const T &obj) { return obj.apply (this); }
505 static return_t default_return_value () { return false; }
506 bool stop_sublookup_iteration (return_t r) const { return r; }
507 return_t recurse (unsigned int sub_lookup_index)
508 {
509 if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
510 return default_return_value ();
511
512 nesting_level_left--;
513 bool ret = recurse_func (this, sub_lookup_index);
514 nesting_level_left++;
515 return ret;
516 }
517
518 skipping_iterator_t iter_input, iter_context;
519
520 hb_font_t *font;
521 hb_face_t *face;
522 hb_buffer_t *buffer;
523 recurse_func_t recurse_func;
524 const GDEF &gdef;
525 const VariationStore &var_store;
526
527 hb_direction_t direction;
528 hb_mask_t lookup_mask;
529 unsigned int table_index; /* GSUB/GPOS */
530 unsigned int lookup_index;
531 unsigned int lookup_props;
532 unsigned int nesting_level_left;
533
534 bool has_glyph_classes;
535 bool auto_zwnj;
536 bool auto_zwj;
537 bool random;
538
539 uint32_t random_state;
540
541
542 hb_ot_apply_context_t (unsigned int table_index_,
543 hb_font_t *font_,
544 hb_buffer_t *buffer_) :
545 iter_input (), iter_context (),
546 font (font_), face (font->face), buffer (buffer_),
547 recurse_func (nullptr),
548 gdef (
549#ifndef HB_NO_OT_LAYOUT
550 *face->table.GDEF->table
551#else
552 Null (GDEF)
553#endif
554 ),
555 var_store (gdef.get_var_store ()),
556 direction (buffer_->props.direction),
557 lookup_mask (1),
558 table_index (table_index_),
559 lookup_index ((unsigned int) -1),
560 lookup_props (0),
561 nesting_level_left (HB_MAX_NESTING_LEVEL),
562 has_glyph_classes (gdef.has_glyph_classes ()),
563 auto_zwnj (true),
564 auto_zwj (true),
565 random (false),
566 random_state (1) { init_iters (); }
567
568 void init_iters ()
569 {
570 iter_input.init (this, false);
571 iter_context.init (this, true);
572 }
573
574 void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; init_iters (); }
575 void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
576 void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
577 void set_random (bool random_) { random = random_; }
578 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
579 void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
580 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }
581
582 uint32_t random_number ()
583 {
584 /* http://www.cplusplus.com/reference/random/minstd_rand/ */
585 random_state = random_state * 48271 % 2147483647;
586 return random_state;
587 }
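  /* The recurrence above is the MINSTD linear congruential generator:
   * state <- (state * 48271) mod (2^31 - 1); starting from a non-zero seed the
   * state stays in [1, 2^31 - 2]. A caller wanting a bounded value would
   * typically reduce it, e.g. (illustrative sketch; N is an assumed count > 0):
   *
   *   unsigned int pick = random_number () % N;
   */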
588
589 bool match_properties_mark (hb_codepoint_t glyph,
590 unsigned int glyph_props,
591 unsigned int match_props) const
592 {
593 /* If using mark filtering sets, the high short of
594 * match_props has the set index.
595 */
596 if (match_props & LookupFlag::UseMarkFilteringSet)
597 return gdef.mark_set_covers (match_props >> 16, glyph);
598
    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different from
     * the attachment type specified."
     */
603 if (match_props & LookupFlag::MarkAttachmentType)
604 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
605
606 return true;
607 }
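  /* Packing of match_props relied on above (per the OpenType LookupFlag
   * definition): the low 16 bits carry the lookup flags, and when
   * UseMarkFilteringSet is set, the mark-filtering-set index rides in the
   * high 16 bits. Illustrative sketch (the value 3 is an assumption):
   *
   *   unsigned int match_props = LookupFlag::UseMarkFilteringSet | (3u << 16);
   *   // marks then match only if GDEF mark-glyph set 3 covers them.
   */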
608
609 bool check_glyph_property (const hb_glyph_info_t *info,
610 unsigned int match_props) const
611 {
612 hb_codepoint_t glyph = info->codepoint;
613 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
614
    /* Not covered if, for example, the glyph class is ligature and
     * match_props includes LookupFlag::IgnoreLigatures.
     */
618 if (glyph_props & match_props & LookupFlag::IgnoreFlags)
619 return false;
620
621 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
622 return match_properties_mark (glyph, glyph_props, match_props);
623
624 return true;
625 }
626
627 void _set_glyph_class (hb_codepoint_t glyph_index,
628 unsigned int class_guess = 0,
629 bool ligature = false,
630 bool component = false) const
631 {
632 unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
633
634 props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
635 if (ligature)
636 {
637 props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
638 /* In the only place that the MULTIPLIED bit is used, Uniscribe
639 * seems to only care about the "last" transformation between
640 * Ligature and Multiple substitutions. Ie. if you ligate, expand,
641 * and ligate again, it forgives the multiplication and acts as
642 * if only ligation happened. As such, clear MULTIPLIED bit.
643 */
644 props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
645 }
646 if (component)
647 props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
648
649 if (likely (has_glyph_classes))
650 props = (props & ~HB_OT_LAYOUT_GLYPH_PROPS_CLASS_MASK) | gdef.get_glyph_props (glyph_index);
651 else if (class_guess)
652 props = (props & ~HB_OT_LAYOUT_GLYPH_PROPS_CLASS_MASK) | class_guess;
653
654 _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
655 }
656
657 void replace_glyph (hb_codepoint_t glyph_index) const
658 {
659 _set_glyph_class (glyph_index);
660 buffer->replace_glyph (glyph_index);
661 }
662 void replace_glyph_inplace (hb_codepoint_t glyph_index) const
663 {
664 _set_glyph_class (glyph_index);
665 buffer->cur().codepoint = glyph_index;
666 }
667 void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
668 unsigned int class_guess) const
669 {
670 _set_glyph_class (glyph_index, class_guess, true);
671 buffer->replace_glyph (glyph_index);
672 }
673 void output_glyph_for_component (hb_codepoint_t glyph_index,
674 unsigned int class_guess) const
675 {
676 _set_glyph_class (glyph_index, class_guess, false, true);
677 buffer->output_glyph (glyph_index);
678 }
679};
680
681
682struct hb_get_subtables_context_t :
683 hb_dispatch_context_t<hb_get_subtables_context_t>
684{
685 template <typename Type>
686 static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
687 {
688 const Type *typed_obj = (const Type *) obj;
689 return typed_obj->apply (c);
690 }
691
692 typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
693
694 struct hb_applicable_t
695 {
696 template <typename T>
697 void init (const T &obj_, hb_apply_func_t apply_func_)
698 {
699 obj = &obj_;
700 apply_func = apply_func_;
701 digest.init ();
702 obj_.get_coverage ().collect_coverage (&digest);
703 }
704
705 bool apply (OT::hb_ot_apply_context_t *c) const
706 {
707 return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
708 }
709
710 private:
711 const void *obj;
712 hb_apply_func_t apply_func;
713 hb_set_digest_t digest;
714 };
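  /* Note: the digest is a cheap approximate filter over the subtable's
   * coverage: may_have () can report false positives but never false
   * negatives, so apply () above only short-circuits when the current glyph
   * definitely cannot be covered by this subtable. */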
715
716 typedef hb_vector_t<hb_applicable_t> array_t;
717
718 /* Dispatch interface. */
719 template <typename T>
720 return_t dispatch (const T &obj)
721 {
722 hb_applicable_t *entry = array.push();
723 entry->init (obj, apply_to<T>);
724 return hb_empty_t ();
725 }
726 static return_t default_return_value () { return hb_empty_t (); }
727
728 hb_get_subtables_context_t (array_t &array_) :
729 array (array_) {}
730
731 array_t &array;
732};
733
734
735
736
737typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
738typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
739typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
740
741struct ContextClosureFuncs
742{
743 intersects_func_t intersects;
744};
745struct ContextCollectGlyphsFuncs
746{
747 collect_glyphs_func_t collect;
748};
749struct ContextApplyFuncs
750{
751 match_func_t match;
752};
753
754
755static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
756{
757 return glyphs->has (value);
758}
759static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
760{
761 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
762 return class_def.intersects_class (glyphs, value);
763}
764static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
765{
766 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
767 return (data+coverage).intersects (glyphs);
768}
769
770static inline bool array_is_subset_of (const hb_set_t *glyphs,
771 unsigned int count,
772 const HBUINT16 values[],
773 intersects_func_t intersects_func,
774 const void *intersects_data)
775{
776 for (const HBUINT16 &_ : + hb_iter (values, count))
777 if (!intersects_func (glyphs, _, intersects_data)) return false;
778 return true;
779}
780
781
782static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
783{
784 glyphs->add (value);
785}
786static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
787{
788 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
789 class_def.collect_class (glyphs, value);
790}
791static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
792{
793 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
794 (data+coverage).collect_coverage (glyphs);
795}
796static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
797 hb_set_t *glyphs,
798 unsigned int count,
799 const HBUINT16 values[],
800 collect_glyphs_func_t collect_func,
801 const void *collect_data)
802{
803 return
804 + hb_iter (values, count)
805 | hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
806 ;
807}
808
809
810static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
811{
812 return glyph_id == value;
813}
814static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
815{
816 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
817 return class_def.get_class (glyph_id) == value;
818}
819static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
820{
821 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
822 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
823}
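/* The three matchers above parallel the three contextual lookup formats:
 * match_glyph compares the glyph id directly (format 1), match_class resolves
 * the value through a ClassDef (format 2), and match_coverage treats the value
 * as an offset to a Coverage table (format 3). The intersects_* and collect_*
 * helpers follow the same pattern for closure and glyph collection. */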
824
825static inline bool would_match_input (hb_would_apply_context_t *c,
826 unsigned int count, /* Including the first glyph (not matched) */
827 const HBUINT16 input[], /* Array of input values--start with second glyph */
828 match_func_t match_func,
829 const void *match_data)
830{
831 if (count != c->len)
832 return false;
833
834 for (unsigned int i = 1; i < count; i++)
835 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
836 return false;
837
838 return true;
839}
840static inline bool match_input (hb_ot_apply_context_t *c,
841 unsigned int count, /* Including the first glyph (not matched) */
842 const HBUINT16 input[], /* Array of input values--start with second glyph */
843 match_func_t match_func,
844 const void *match_data,
845 unsigned int *end_offset,
846 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
847 unsigned int *p_total_component_count = nullptr)
848{
849 TRACE_APPLY (nullptr);
850
851 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);
852
853 hb_buffer_t *buffer = c->buffer;
854
855 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
856 skippy_iter.reset (buffer->idx, count - 1);
857 skippy_iter.set_match_func (match_func, match_data, input);
858
859 /*
860 * This is perhaps the trickiest part of OpenType... Remarks:
861 *
862 * - If all components of the ligature were marks, we call this a mark ligature.
863 *
864 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
865 * it as a ligature glyph.
866 *
867 * - Ligatures cannot be formed across glyphs attached to different components
868 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
870 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
871 * There are a couple of exceptions to this:
872 *
873 * o If a ligature tries ligating with marks that belong to it itself, go ahead,
874 * assuming that the font designer knows what they are doing (otherwise it can
   *   break Indic stuff when a matra wants to ligate with a conjunct).
876 *
877 * o If two marks want to ligate and they belong to different components of the
878 * same ligature glyph, and said ligature glyph is to be ignored according to
879 * mark-filtering rules, then allow.
880 * https://github.com/harfbuzz/harfbuzz/issues/545
881 */
882
883 unsigned int total_component_count = 0;
884 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
885
886 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
887 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
888
889 enum {
890 LIGBASE_NOT_CHECKED,
891 LIGBASE_MAY_NOT_SKIP,
892 LIGBASE_MAY_SKIP
893 } ligbase = LIGBASE_NOT_CHECKED;
894
895 match_positions[0] = buffer->idx;
896 for (unsigned int i = 1; i < count; i++)
897 {
898 if (!skippy_iter.next ()) return_trace (false);
899
900 match_positions[i] = skippy_iter.idx;
901
902 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
903 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
904
905 if (first_lig_id && first_lig_comp)
906 {
907 /* If first component was attached to a previous ligature component,
908 * all subsequent components should be attached to the same ligature
909 * component, otherwise we shouldn't ligate them... */
910 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
911 {
912 /* ...unless, we are attached to a base ligature and that base
913 * ligature is ignorable. */
914 if (ligbase == LIGBASE_NOT_CHECKED)
915 {
916 bool found = false;
917 const auto *out = buffer->out_info;
918 unsigned int j = buffer->out_len;
919 while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
920 {
921 if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
922 {
923 j--;
924 found = true;
925 break;
926 }
927 j--;
928 }
929
930 if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
931 ligbase = LIGBASE_MAY_SKIP;
932 else
933 ligbase = LIGBASE_MAY_NOT_SKIP;
934 }
935
936 if (ligbase == LIGBASE_MAY_NOT_SKIP)
937 return_trace (false);
938 }
939 }
940 else
941 {
942 /* If first component was NOT attached to a previous ligature component,
943 * all subsequent components should also NOT be attached to any ligature
944 * component, unless they are attached to the first component itself! */
945 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
946 return_trace (false);
947 }
948
949 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
950 }
951
952 *end_offset = skippy_iter.idx - buffer->idx + 1;
953
954 if (p_total_component_count)
955 *p_total_component_count = total_component_count;
956
957 return_trace (true);
958}
959static inline bool ligate_input (hb_ot_apply_context_t *c,
960 unsigned int count, /* Including the first glyph */
961 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
962 unsigned int match_length,
963 hb_codepoint_t lig_glyph,
964 unsigned int total_component_count)
965{
966 TRACE_APPLY (nullptr);
967
968 hb_buffer_t *buffer = c->buffer;
969
970 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
971
  /* - If a base and one or more marks ligate, consider that as a base, NOT a
   *   ligature, such that all following marks can still attach to it.
974 * https://github.com/harfbuzz/harfbuzz/issues/1109
975 *
976 * - If all components of the ligature were marks, we call this a mark ligature.
977 * If it *is* a mark ligature, we don't allocate a new ligature id, and leave
978 * the ligature to keep its old ligature id. This will allow it to attach to
979 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
981 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
982 * later, we don't want them to lose their ligature id/component, otherwise
983 * GPOS will fail to correctly position the mark ligature on top of the
984 * LAM,LAM,HEH ligature. See:
985 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
986 *
   * - If a ligature is formed of components, some of which are themselves
   *   ligatures, and those ligature components had marks attached to *their*
989 * components, we have to attach the marks to the new ligature component
990 * positions! Now *that*'s tricky! And these marks may be following the
991 * last component of the whole sequence, so we should loop forward looking
992 * for them and update them.
993 *
994 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
995 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
996 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
997 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
998 * the new ligature with a component value of 2.
999 *
1000 * This in fact happened to a font... See:
1001 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
1002 */
1003
1004 bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
1005 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
1006 for (unsigned int i = 1; i < count; i++)
1007 if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
1008 {
1009 is_base_ligature = false;
1010 is_mark_ligature = false;
1011 break;
1012 }
1013 bool is_ligature = !is_base_ligature && !is_mark_ligature;
1014
1015 unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
1016 unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
1017 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1018 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1019 unsigned int components_so_far = last_num_components;
1020
1021 if (is_ligature)
1022 {
1023 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
1024 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
1025 {
1026 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
1027 }
1028 }
1029 c->replace_glyph_with_ligature (lig_glyph, klass);
1030
1031 for (unsigned int i = 1; i < count; i++)
1032 {
1033 while (buffer->idx < match_positions[i] && buffer->successful)
1034 {
1035 if (is_ligature)
1036 {
1037 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
1038 if (this_comp == 0)
1039 this_comp = last_num_components;
1040 unsigned int new_lig_comp = components_so_far - last_num_components +
1041 hb_min (this_comp, last_num_components);
1042 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
1043 }
1044 buffer->next_glyph ();
1045 }
1046
1047 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1048 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1049 components_so_far += last_num_components;
1050
1051 /* Skip the base glyph */
1052 buffer->idx++;
1053 }
1054
1055 if (!is_mark_ligature && last_lig_id)
1056 {
1057 /* Re-adjust components for any marks following. */
1058 for (unsigned i = buffer->idx; i < buffer->len; ++i)
1059 {
1060 if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;
1061
1062 unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
1063 if (!this_comp) break;
1064
1065 unsigned new_lig_comp = components_so_far - last_num_components +
1066 hb_min (this_comp, last_num_components);
1067 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
1068 }
1069 }
1070 return_trace (true);
1071}
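/* A minimal sketch of the match-then-ligate pipeline (illustrative only; the
 * names compCount, componentZ and ligGlyph are assumptions standing in for a
 * ligature subtable's fields):
 *
 *   unsigned int end_offset = 0, total_components = 0;
 *   unsigned int positions[HB_MAX_CONTEXT_LENGTH];
 *   if (match_input (c, compCount, componentZ, match_glyph, nullptr,
 *                    &end_offset, positions, &total_components))
 *     ligate_input (c, compCount, positions, end_offset, ligGlyph,
 *                   total_components);
 *
 * componentZ here is expected to start with the *second* component, as
 * documented on match_input's input[] parameter. */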
1072
1073static inline bool match_backtrack (hb_ot_apply_context_t *c,
1074 unsigned int count,
1075 const HBUINT16 backtrack[],
1076 match_func_t match_func,
1077 const void *match_data,
1078 unsigned int *match_start)
1079{
1080 TRACE_APPLY (nullptr);
1081
1082 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1083 skippy_iter.reset (c->buffer->backtrack_len (), count);
1084 skippy_iter.set_match_func (match_func, match_data, backtrack);
1085
1086 for (unsigned int i = 0; i < count; i++)
1087 if (!skippy_iter.prev ())
1088 return_trace (false);
1089
1090 *match_start = skippy_iter.idx;
1091
1092 return_trace (true);
1093}
1094
1095static inline bool match_lookahead (hb_ot_apply_context_t *c,
1096 unsigned int count,
1097 const HBUINT16 lookahead[],
1098 match_func_t match_func,
1099 const void *match_data,
1100 unsigned int offset,
1101 unsigned int *end_index)
1102{
1103 TRACE_APPLY (nullptr);
1104
1105 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1106 skippy_iter.reset (c->buffer->idx + offset - 1, count);
1107 skippy_iter.set_match_func (match_func, match_data, lookahead);
1108
1109 for (unsigned int i = 0; i < count; i++)
1110 if (!skippy_iter.next ())
1111 return_trace (false);
1112
1113 *end_index = skippy_iter.idx + 1;
1114
1115 return_trace (true);
1116}
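/* Indexing note for the two helpers above: match_backtrack walks the *out*
 * buffer backwards starting just before c->buffer->backtrack_len () and leaves
 * *match_start at the earliest matched position; match_lookahead walks the
 * input buffer forward from idx + offset - 1 and leaves *end_index one past
 * the last matched glyph. */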
1117
1118
1119
1120struct LookupRecord
1121{
1122 LookupRecord* copy (hb_serialize_context_t *c,
1123 const hb_map_t *lookup_map) const
1124 {
1125 TRACE_SERIALIZE (this);
1126 auto *out = c->embed (*this);
1127 if (unlikely (!out)) return_trace (nullptr);
1128
1129 out->lookupListIndex = hb_map_get (lookup_map, lookupListIndex);
1130 return_trace (out);
1131 }
1132
1133 bool sanitize (hb_sanitize_context_t *c) const
1134 {
1135 TRACE_SANITIZE (this);
1136 return_trace (c->check_struct (this));
1137 }
1138
1139 HBUINT16 sequenceIndex; /* Index into current glyph
1140 * sequence--first glyph = 0 */
1141 HBUINT16 lookupListIndex; /* Lookup to apply to that
                                 * position--zero-based */
1143 public:
1144 DEFINE_SIZE_STATIC (4);
1145};
1146
1147template <typename context_t>
1148static inline void recurse_lookups (context_t *c,
1149 unsigned int lookupCount,
1150 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1151{
1152 for (unsigned int i = 0; i < lookupCount; i++)
1153 c->recurse (lookupRecord[i].lookupListIndex);
1154}
1155
1156static inline bool apply_lookup (hb_ot_apply_context_t *c,
1157 unsigned int count, /* Including the first glyph */
1158 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
1159 unsigned int lookupCount,
1160 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
1161 unsigned int match_length)
1162{
1163 TRACE_APPLY (nullptr);
1164
1165 hb_buffer_t *buffer = c->buffer;
1166 int end;
1167
1168 /* All positions are distance from beginning of *output* buffer.
1169 * Adjust. */
1170 {
1171 unsigned int bl = buffer->backtrack_len ();
1172 end = bl + match_length;
1173
1174 int delta = bl - buffer->idx;
1175 /* Convert positions to new indexing. */
1176 for (unsigned int j = 0; j < count; j++)
1177 match_positions[j] += delta;
1178 }
1179
1180 for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
1181 {
1182 unsigned int idx = lookupRecord[i].sequenceIndex;
1183 if (idx >= count)
1184 continue;
1185
    /* Don't recurse into ourselves at the same position.
     * Note that this test is too naive; it doesn't catch longer loops. */
1188 if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index)
1189 continue;
1190
1191 if (unlikely (!buffer->move_to (match_positions[idx])))
1192 break;
1193
1194 if (unlikely (buffer->max_ops <= 0))
1195 break;
1196
1197 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1198 if (!c->recurse (lookupRecord[i].lookupListIndex))
1199 continue;
1200
1201 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1202 int delta = new_len - orig_len;
1203
1204 if (!delta)
1205 continue;
1206
1207 /* Recursed lookup changed buffer len. Adjust.
1208 *
1209 * TODO:
1210 *
1211 * Right now, if buffer length increased by n, we assume n new glyphs
1212 * were added right after the current position, and if buffer length
1213 * was decreased by n, we assume n match positions after the current
     * one were removed. The former (buffer length increased) case is
1215 * fine, but the decrease case can be improved in at least two ways,
1216 * both of which are significant:
1217 *
1218 * - If recursed-to lookup is MultipleSubst and buffer length
     *     decreased, then it's the current match position that was deleted,
1220 * NOT the one after it.
1221 *
1222 * - If buffer length was decreased by n, it does not necessarily
     *     mean that n match positions were removed, as there might
1224 * have been marks and default-ignorables in the sequence. We
     *     should instead drop match positions between current-position
     *     and current-position + n.
1227 *
1228 * It should be possible to construct tests for both of these cases.
1229 */
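    /* Worked example of the adjustment below (illustrative): if the recursed
     * lookup was a MultipleSubst that turned one glyph into three, delta is +2;
     * two new match positions are synthesized right after idx (each one past
     * the previous) and every later position shifts forward by 2. */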
1230
1231 end += delta;
1232 if (end <= int (match_positions[idx]))
1233 {
1234 /* End might end up being smaller than match_positions[idx] if the recursed
       * lookup ended up removing many items, more than we had matched.
1236 * Just never rewind end back and get out of here.
1237 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
1238 end = match_positions[idx];
1239 /* There can't be any further changes. */
1240 break;
1241 }
1242
1243 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
1244
1245 if (delta > 0)
1246 {
1247 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1248 break;
1249 }
1250 else
1251 {
1252 /* NOTE: delta is negative. */
1253 delta = hb_max (delta, (int) next - (int) count);
1254 next -= delta;
1255 }
1256
1257 /* Shift! */
1258 memmove (match_positions + next + delta, match_positions + next,
1259 (count - next) * sizeof (match_positions[0]));
1260 next += delta;
1261 count += delta;
1262
1263 /* Fill in new entries. */
1264 for (unsigned int j = idx + 1; j < next; j++)
1265 match_positions[j] = match_positions[j - 1] + 1;
1266
1267 /* And fixup the rest. */
1268 for (; next < count; next++)
1269 match_positions[next] += delta;
1270 }
1271
1272 buffer->move_to (end);
1273
1274 return_trace (true);
1275}
1276
1277
1278
1279/* Contextual lookups */
1280
1281struct ContextClosureLookupContext
1282{
1283 ContextClosureFuncs funcs;
1284 const void *intersects_data;
1285};
1286
1287struct ContextCollectGlyphsLookupContext
1288{
1289 ContextCollectGlyphsFuncs funcs;
1290 const void *collect_data;
1291};
1292
1293struct ContextApplyLookupContext
1294{
1295 ContextApplyFuncs funcs;
1296 const void *match_data;
1297};
1298
1299static inline bool context_intersects (const hb_set_t *glyphs,
1300 unsigned int inputCount, /* Including the first glyph (not matched) */
1301 const HBUINT16 input[], /* Array of input values--start with second glyph */
1302 ContextClosureLookupContext &lookup_context)
1303{
1304 return array_is_subset_of (glyphs,
1305 inputCount ? inputCount - 1 : 0, input,
1306 lookup_context.funcs.intersects, lookup_context.intersects_data);
1307}
1308
1309static inline void context_closure_lookup (hb_closure_context_t *c,
1310 unsigned int inputCount, /* Including the first glyph (not matched) */
1311 const HBUINT16 input[], /* Array of input values--start with second glyph */
1312 unsigned int lookupCount,
1313 const LookupRecord lookupRecord[],
1314 ContextClosureLookupContext &lookup_context)
1315{
1316 if (context_intersects (c->glyphs,
1317 inputCount, input,
1318 lookup_context))
1319 recurse_lookups (c,
1320 lookupCount, lookupRecord);
1321}
1322
1323static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1324 unsigned int inputCount, /* Including the first glyph (not matched) */
1325 const HBUINT16 input[], /* Array of input values--start with second glyph */
1326 unsigned int lookupCount,
1327 const LookupRecord lookupRecord[],
1328 ContextCollectGlyphsLookupContext &lookup_context)
1329{
1330 collect_array (c, c->input,
1331 inputCount ? inputCount - 1 : 0, input,
1332 lookup_context.funcs.collect, lookup_context.collect_data);
1333 recurse_lookups (c,
1334 lookupCount, lookupRecord);
1335}
1336
1337static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1338 unsigned int inputCount, /* Including the first glyph (not matched) */
1339 const HBUINT16 input[], /* Array of input values--start with second glyph */
1340 unsigned int lookupCount HB_UNUSED,
1341 const LookupRecord lookupRecord[] HB_UNUSED,
1342 ContextApplyLookupContext &lookup_context)
1343{
1344 return would_match_input (c,
1345 inputCount, input,
1346 lookup_context.funcs.match, lookup_context.match_data);
1347}
1348static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1349 unsigned int inputCount, /* Including the first glyph (not matched) */
1350 const HBUINT16 input[], /* Array of input values--start with second glyph */
1351 unsigned int lookupCount,
1352 const LookupRecord lookupRecord[],
1353 ContextApplyLookupContext &lookup_context)
1354{
1355 unsigned int match_length = 0;
1356 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1357 return match_input (c,
1358 inputCount, input,
1359 lookup_context.funcs.match, lookup_context.match_data,
1360 &match_length, match_positions)
1361 && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length),
1362 apply_lookup (c,
1363 inputCount, match_positions,
1364 lookupCount, lookupRecord,
1365 match_length));
1366}
1367
1368struct Rule
1369{
1370 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1371 {
1372 return context_intersects (glyphs,
1373 inputCount, inputZ.arrayZ,
1374 lookup_context);
1375 }
1376
1377 void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1378 {
1379 if (unlikely (c->lookup_limit_exceeded ())) return;
1380
1381 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1382 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1383 context_closure_lookup (c,
1384 inputCount, inputZ.arrayZ,
1385 lookupCount, lookupRecord.arrayZ,
1386 lookup_context);
1387 }
1388
1389 void closure_lookups (hb_closure_lookups_context_t *c) const
1390 {
1391 if (unlikely (c->lookup_limit_exceeded ())) return;
1392
1393 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1394 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1395 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
1396 }
1397
1398 void collect_glyphs (hb_collect_glyphs_context_t *c,
1399 ContextCollectGlyphsLookupContext &lookup_context) const
1400 {
1401 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1402 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1403 context_collect_glyphs_lookup (c,
1404 inputCount, inputZ.arrayZ,
1405 lookupCount, lookupRecord.arrayZ,
1406 lookup_context);
1407 }
1408
1409 bool would_apply (hb_would_apply_context_t *c,
1410 ContextApplyLookupContext &lookup_context) const
1411 {
1412 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1413 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1414 return context_would_apply_lookup (c,
1415 inputCount, inputZ.arrayZ,
1416 lookupCount, lookupRecord.arrayZ,
1417 lookup_context);
1418 }
1419
1420 bool apply (hb_ot_apply_context_t *c,
1421 ContextApplyLookupContext &lookup_context) const
1422 {
1423 TRACE_APPLY (this);
1424 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1425 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1426 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
1427 }
1428
1429 bool serialize (hb_serialize_context_t *c,
1430 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
1431 const hb_map_t *lookup_map) const
1432 {
1433 TRACE_SERIALIZE (this);
1434 auto *out = c->start_embed (this);
1435 if (unlikely (!c->extend_min (out))) return_trace (false);
1436
1437 out->inputCount = inputCount;
1438 out->lookupCount = lookupCount;
1439
1440 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1441 for (const auto org : input)
1442 {
1443 HBUINT16 d;
1444 d = input_mapping->get (org);
1445 c->copy (d);
1446 }
1447
1448 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1449 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1450 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
1451 c->copy (lookupRecord[i], lookup_map);
1452
1453 return_trace (true);
1454 }
1455
1456 bool subset (hb_subset_context_t *c,
1457 const hb_map_t *lookup_map,
1458 const hb_map_t *klass_map = nullptr) const
1459 {
1460 TRACE_SUBSET (this);
1461
1462 const hb_array_t<const HBUINT16> input = inputZ.as_array ((inputCount ? inputCount - 1 : 0));
1463 if (!input.length) return_trace (false);
1464
1465 const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
1466 if (!hb_all (input, mapping)) return_trace (false);
1467 return_trace (serialize (c->serializer, mapping, lookup_map));
1468 }
1469
1470 public:
1471 bool sanitize (hb_sanitize_context_t *c) const
1472 {
1473 TRACE_SANITIZE (this);
1474 return_trace (inputCount.sanitize (c) &&
1475 lookupCount.sanitize (c) &&
1476 c->check_range (inputZ.arrayZ,
1477 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
1478 LookupRecord::static_size * lookupCount));
1479 }
1480
1481 protected:
1482 HBUINT16 inputCount; /* Total number of glyphs in input
1483 * glyph sequence--includes the first
1484 * glyph */
1485 HBUINT16 lookupCount; /* Number of LookupRecords */
1486 UnsizedArrayOf<HBUINT16>
1487 inputZ; /* Array of match inputs--start with
1488 * second glyph */
1489/*UnsizedArrayOf<LookupRecord>
1490 lookupRecordX;*/ /* Array of LookupRecords--in
1491 * design order */
1492 public:
1493 DEFINE_SIZE_ARRAY (4, inputZ);
1494};
1495
1496struct RuleSet
1497{
1498 bool intersects (const hb_set_t *glyphs,
1499 ContextClosureLookupContext &lookup_context) const
1500 {
1501 return
1502 + hb_iter (rule)
1503 | hb_map (hb_add (this))
1504 | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
1505 | hb_any
1506 ;
1507 }
1508
1509 void closure (hb_closure_context_t *c,
1510 ContextClosureLookupContext &lookup_context) const
1511 {
1512 if (unlikely (c->lookup_limit_exceeded ())) return;
1513
1514 return
1515 + hb_iter (rule)
1516 | hb_map (hb_add (this))
1517 | hb_apply ([&] (const Rule &_) { _.closure (c, lookup_context); })
1518 ;
1519 }
1520
1521 void closure_lookups (hb_closure_lookups_context_t *c) const
1522 {
1523 if (unlikely (c->lookup_limit_exceeded ())) return;
1524
1525 return
1526 + hb_iter (rule)
1527 | hb_map (hb_add (this))
1528 | hb_apply ([&] (const Rule &_) { _.closure_lookups (c); })
1529 ;
1530 }
1531
1532 void collect_glyphs (hb_collect_glyphs_context_t *c,
1533 ContextCollectGlyphsLookupContext &lookup_context) const
1534 {
1535 return
1536 + hb_iter (rule)
1537 | hb_map (hb_add (this))
1538 | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
1539 ;
1540 }
1541
1542 bool would_apply (hb_would_apply_context_t *c,
1543 ContextApplyLookupContext &lookup_context) const
1544 {
1545 return
1546 + hb_iter (rule)
1547 | hb_map (hb_add (this))
1548 | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
1549 | hb_any
1550 ;
1551 }
1552
1553 bool apply (hb_ot_apply_context_t *c,
1554 ContextApplyLookupContext &lookup_context) const
1555 {
1556 TRACE_APPLY (this);
1557 return_trace (
1558 + hb_iter (rule)
1559 | hb_map (hb_add (this))
1560 | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
1561 | hb_any
1562 )
1563 ;
1564 }
1565
1566 bool subset (hb_subset_context_t *c,
1567 const hb_map_t *lookup_map,
1568 const hb_map_t *klass_map = nullptr) const
1569 {
1570 TRACE_SUBSET (this);
1571
1572 auto snap = c->serializer->snapshot ();
1573 auto *out = c->serializer->start_embed (*this);
1574 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1575
1576 for (const OffsetTo<Rule>& _ : rule)
1577 {
1578 if (!_) continue;
1579 auto *o = out->rule.serialize_append (c->serializer);
1580 if (unlikely (!o)) continue;
1581
1582 auto o_snap = c->serializer->snapshot ();
1583 if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
1584 {
1585 out->rule.pop ();
1586 c->serializer->revert (o_snap);
1587 }
1588 }
1589
1590 bool ret = bool (out->rule);
1591 if (!ret) c->serializer->revert (snap);
1592
1593 return_trace (ret);
1594 }
1595
1596 bool sanitize (hb_sanitize_context_t *c) const
1597 {
1598 TRACE_SANITIZE (this);
1599 return_trace (rule.sanitize (c, this));
1600 }
1601
1602 protected:
1603 OffsetArrayOf<Rule>
1604 rule; /* Array of Rule tables
1605 * ordered by preference */
1606 public:
1607 DEFINE_SIZE_ARRAY (2, rule);
1608};
1609
1610
1611struct ContextFormat1
1612{
1613 bool intersects (const hb_set_t *glyphs) const
1614 {
1615 struct ContextClosureLookupContext lookup_context = {
1616 {intersects_glyph},
1617 nullptr
1618 };
1619
1620 return
1621 + hb_zip (this+coverage, ruleSet)
1622 | hb_filter (*glyphs, hb_first)
1623 | hb_map (hb_second)
1624 | hb_map (hb_add (this))
1625 | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
1626 | hb_any
1627 ;
1628 }
1629
1630 void closure (hb_closure_context_t *c) const
1631 {
1632 struct ContextClosureLookupContext lookup_context = {
1633 {intersects_glyph},
1634 nullptr
1635 };
1636
1637 + hb_zip (this+coverage, ruleSet)
1638 | hb_filter (*c->glyphs, hb_first)
1639 | hb_map (hb_second)
1640 | hb_map (hb_add (this))
1641 | hb_apply ([&] (const RuleSet &_) { _.closure (c, lookup_context); })
1642 ;
1643 }
1644
1645 void closure_lookups (hb_closure_lookups_context_t *c) const
1646 {
1647 + hb_iter (ruleSet)
1648 | hb_map (hb_add (this))
1649 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c); })
1650 ;
1651 }
1652
1653 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1654
1655 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1656 {
1657 (this+coverage).collect_coverage (c->input);
1658
1659 struct ContextCollectGlyphsLookupContext lookup_context = {
1660 {collect_glyph},
1661 nullptr
1662 };
1663
1664 + hb_iter (ruleSet)
1665 | hb_map (hb_add (this))
1666 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1667 ;
1668 }
1669
1670 bool would_apply (hb_would_apply_context_t *c) const
1671 {
1672 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1673 struct ContextApplyLookupContext lookup_context = {
1674 {match_glyph},
1675 nullptr
1676 };
1677 return rule_set.would_apply (c, lookup_context);
1678 }
1679
1680 const Coverage &get_coverage () const { return this+coverage; }
1681
1682 bool apply (hb_ot_apply_context_t *c) const
1683 {
1684 TRACE_APPLY (this);
1685 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1686 if (likely (index == NOT_COVERED))
1687 return_trace (false);
1688
1689 const RuleSet &rule_set = this+ruleSet[index];
1690 struct ContextApplyLookupContext lookup_context = {
1691 {match_glyph},
1692 nullptr
1693 };
1694 return_trace (rule_set.apply (c, lookup_context));
1695 }
1696
1697 bool subset (hb_subset_context_t *c) const
1698 {
1699 TRACE_SUBSET (this);
1700 const hb_set_t &glyphset = *c->plan->glyphset ();
1701 const hb_map_t &glyph_map = *c->plan->glyph_map;
1702
1703 auto *out = c->serializer->start_embed (*this);
1704 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1705 out->format = format;
1706
1707 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
1708 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1709 + hb_zip (this+coverage, ruleSet)
1710 | hb_filter (glyphset, hb_first)
1711 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
1712 | hb_map (hb_first)
1713 | hb_map (glyph_map)
1714 | hb_sink (new_coverage)
1715 ;
1716
1717 out->coverage.serialize (c->serializer, out)
1718 .serialize (c->serializer, new_coverage.iter ());
1719 return_trace (bool (new_coverage));
1720 }
1721
1722 bool sanitize (hb_sanitize_context_t *c) const
1723 {
1724 TRACE_SANITIZE (this);
1725 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1726 }
1727
1728 protected:
1729 HBUINT16 format; /* Format identifier--format = 1 */
1730 OffsetTo<Coverage>
1731 coverage; /* Offset to Coverage table--from
1732 * beginning of table */
1733 OffsetArrayOf<RuleSet>
1734 ruleSet; /* Array of RuleSet tables
1735 * ordered by Coverage Index */
1736 public:
1737 DEFINE_SIZE_ARRAY (6, ruleSet);
1738};
1739
1740
1741struct ContextFormat2
1742{
1743 bool intersects (const hb_set_t *glyphs) const
1744 {
1745 if (!(this+coverage).intersects (glyphs))
1746 return false;
1747
1748 const ClassDef &class_def = this+classDef;
1749
1750 struct ContextClosureLookupContext lookup_context = {
1751 {intersects_class},
1752 &class_def
1753 };
1754
1755 return
1756 + hb_iter (ruleSet)
1757 | hb_map (hb_add (this))
1758 | hb_enumerate
1759 | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
1760 { return class_def.intersects_class (glyphs, p.first) &&
1761 p.second.intersects (glyphs, lookup_context); })
1762 | hb_any
1763 ;
1764 }
1765
1766 void closure (hb_closure_context_t *c) const
1767 {
1768 if (!(this+coverage).intersects (c->glyphs))
1769 return;
1770
1771 const ClassDef &class_def = this+classDef;
1772
1773 struct ContextClosureLookupContext lookup_context = {
1774 {intersects_class},
1775 &class_def
1776 };
1777
1778 return
1779 + hb_enumerate (ruleSet)
1780 | hb_filter ([&] (unsigned _)
1781 { return class_def.intersects_class (c->glyphs, _); },
1782 hb_first)
1783 | hb_map (hb_second)
1784 | hb_map (hb_add (this))
1785 | hb_apply ([&] (const RuleSet &_) { _.closure (c, lookup_context); })
1786 ;
1787 }
1788
1789 void closure_lookups (hb_closure_lookups_context_t *c) const
1790 {
1791 + hb_iter (ruleSet)
1792 | hb_map (hb_add (this))
1793 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c); })
1794 ;
1795 }
1796
1797 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1798
1799 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1800 {
1801 (this+coverage).collect_coverage (c->input);
1802
1803 const ClassDef &class_def = this+classDef;
1804 struct ContextCollectGlyphsLookupContext lookup_context = {
1805 {collect_class},
1806 &class_def
1807 };
1808
1809 + hb_iter (ruleSet)
1810 | hb_map (hb_add (this))
1811 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1812 ;
1813 }
1814
1815 bool would_apply (hb_would_apply_context_t *c) const
1816 {
1817 const ClassDef &class_def = this+classDef;
1818 unsigned int index = class_def.get_class (c->glyphs[0]);
1819 const RuleSet &rule_set = this+ruleSet[index];
1820 struct ContextApplyLookupContext lookup_context = {
1821 {match_class},
1822 &class_def
1823 };
1824 return rule_set.would_apply (c, lookup_context);
1825 }
1826
1827 const Coverage &get_coverage () const { return this+coverage; }
1828
1829 bool apply (hb_ot_apply_context_t *c) const
1830 {
1831 TRACE_APPLY (this);
1832 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1833 if (likely (index == NOT_COVERED)) return_trace (false);
1834
1835 const ClassDef &class_def = this+classDef;
1836 index = class_def.get_class (c->buffer->cur().codepoint);
1837 const RuleSet &rule_set = this+ruleSet[index];
1838 struct ContextApplyLookupContext lookup_context = {
1839 {match_class},
1840 &class_def
1841 };
1842 return_trace (rule_set.apply (c, lookup_context));
1843 }
1844
1845 bool subset (hb_subset_context_t *c) const
1846 {
1847 TRACE_SUBSET (this);
1848 auto *out = c->serializer->start_embed (*this);
1849 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1850 out->format = format;
1851 if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
1852 return_trace (false);
1853
1854 hb_map_t klass_map;
1855 out->classDef.serialize_subset (c, classDef, this, &klass_map);
1856
1857 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
1858 bool ret = true;
1859 unsigned non_zero_index = 0, index = 0;
1860 for (const hb_pair_t<unsigned, const OffsetTo<RuleSet>&> _ : + hb_enumerate (ruleSet)
1861 | hb_filter (klass_map, hb_first))
1862 {
1863 auto *o = out->ruleSet.serialize_append (c->serializer);
1864 if (unlikely (!o))
1865 {
1866 ret = false;
1867 break;
1868 }
1869
1870 if (o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
1871 non_zero_index = index;
1872
1873 index++;
1874 }
1875
1876 if (!ret) return_trace (ret);
1877
 1878 // Prune empty trailing ruleSets.
1879 --index;
1880 while (index > non_zero_index)
1881 {
1882 out->ruleSet.pop ();
1883 index--;
1884 }
1885
1886 return_trace (bool (out->ruleSet));
1887 }
1888
1889 bool sanitize (hb_sanitize_context_t *c) const
1890 {
1891 TRACE_SANITIZE (this);
1892 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
1893 }
1894
1895 protected:
1896 HBUINT16 format; /* Format identifier--format = 2 */
1897 OffsetTo<Coverage>
1898 coverage; /* Offset to Coverage table--from
1899 * beginning of table */
1900 OffsetTo<ClassDef>
1901 classDef; /* Offset to glyph ClassDef table--from
1902 * beginning of table */
1903 OffsetArrayOf<RuleSet>
1904 ruleSet; /* Array of RuleSet tables
1905 * ordered by class */
1906 public:
1907 DEFINE_SIZE_ARRAY (8, ruleSet);
1908};
1909
1910
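/*
 * ContextFormat3: coverage-based contextual lookup.  There is a single rule:
 * each input position gets its own Coverage table (coverageZ), and the
 * LookupRecords to apply on a match follow the coverage offsets in memory.
 */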
1911struct ContextFormat3
1912{
1913 bool intersects (const hb_set_t *glyphs) const
1914 {
1915 if (!(this+coverageZ[0]).intersects (glyphs))
1916 return false;
1917
1918 struct ContextClosureLookupContext lookup_context = {
1919 {intersects_coverage},
1920 this
1921 };
1922 return context_intersects (glyphs,
1923 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1924 lookup_context);
1925 }
1926
1927 void closure (hb_closure_context_t *c) const
1928 {
1929 if (!(this+coverageZ[0]).intersects (c->glyphs))
1930 return;
1931
1932 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1933 struct ContextClosureLookupContext lookup_context = {
1934 {intersects_coverage},
1935 this
1936 };
1937 context_closure_lookup (c,
1938 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1939 lookupCount, lookupRecord,
1940 lookup_context);
1941 }
1942
1943 void closure_lookups (hb_closure_lookups_context_t *c) const
1944 {
1945 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1946 recurse_lookups (c, lookupCount, lookupRecord);
1947 }
1948
1949 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1950
1951 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1952 {
1953 (this+coverageZ[0]).collect_coverage (c->input);
1954
1955 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1956 struct ContextCollectGlyphsLookupContext lookup_context = {
1957 {collect_coverage},
1958 this
1959 };
1960
1961 context_collect_glyphs_lookup (c,
1962 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1963 lookupCount, lookupRecord,
1964 lookup_context);
1965 }
1966
1967 bool would_apply (hb_would_apply_context_t *c) const
1968 {
1969 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1970 struct ContextApplyLookupContext lookup_context = {
1971 {match_coverage},
1972 this
1973 };
1974 return context_would_apply_lookup (c,
1975 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
1976 lookupCount, lookupRecord,
1977 lookup_context);
1978 }
1979
1980 const Coverage &get_coverage () const { return this+coverageZ[0]; }
1981
1982 bool apply (hb_ot_apply_context_t *c) const
1983 {
1984 TRACE_APPLY (this);
1985 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
1986 if (likely (index == NOT_COVERED)) return_trace (false);
1987
1988 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
1989 struct ContextApplyLookupContext lookup_context = {
1990 {match_coverage},
1991 this
1992 };
1993 return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
1994 }
1995
1996 bool subset (hb_subset_context_t *c) const
1997 {
1998 TRACE_SUBSET (this);
1999 auto *out = c->serializer->start_embed (this);
2000 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2001
2002 out->format = format;
2003 out->glyphCount = glyphCount;
2004 out->lookupCount = lookupCount;
2005
2006 auto coverages = coverageZ.as_array (glyphCount);
2007
2008 for (const OffsetTo<Coverage>& offset : coverages)
2009 {
2010 auto *o = c->serializer->allocate_size<OffsetTo<Coverage>> (OffsetTo<Coverage>::static_size);
2011 if (unlikely (!o)) return_trace (false);
2012 if (!o->serialize_subset (c, offset, this)) return_trace (false);
2013 }
2014
2015 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2016 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2017 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
2018 c->serializer->copy (lookupRecord[i], lookup_map);
2019
2020 return_trace (true);
2021 }
2022
2023 bool sanitize (hb_sanitize_context_t *c) const
2024 {
2025 TRACE_SANITIZE (this);
2026 if (!c->check_struct (this)) return_trace (false);
2027 unsigned int count = glyphCount;
2028 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
2029 if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
2030 for (unsigned int i = 0; i < count; i++)
2031 if (!coverageZ[i].sanitize (c, this)) return_trace (false);
2032 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2033 return_trace (c->check_array (lookupRecord, lookupCount));
2034 }
2035
2036 protected:
2037 HBUINT16 format; /* Format identifier--format = 3 */
2038 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
2039 * sequence */
2040 HBUINT16 lookupCount; /* Number of LookupRecords */
2041 UnsizedArrayOf<OffsetTo<Coverage>>
2042 coverageZ; /* Array of offsets to Coverage
2043 * table in glyph sequence order */
2044/*UnsizedArrayOf<LookupRecord>
2045 lookupRecordX;*/ /* Array of LookupRecords--in
2046 * design order */
2047 public:
2048 DEFINE_SIZE_ARRAY (6, coverageZ);
2049};
2050
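/*
 * Context: format dispatcher for the three contextual-lookup formats above.
 */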
2051struct Context
2052{
2053 template <typename context_t, typename ...Ts>
2054 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2055 {
2056 TRACE_DISPATCH (this, u.format);
2057 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2058 switch (u.format) {
2059 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2060 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
2061 case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
2062 default:return_trace (c->default_return_value ());
2063 }
2064 }
2065
2066 protected:
2067 union {
2068 HBUINT16 format; /* Format identifier */
2069 ContextFormat1 format1;
2070 ContextFormat2 format2;
2071 ContextFormat3 format3;
2072 } u;
2073};
2074
2075
2076/* Chaining Contextual lookups */
2077
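/*
 * Chaining contexts extend the plain contextual lookups with a backtrack
 * sequence (matched before the input, walking backwards from it) and a
 * lookahead sequence (matched after it).  The three *LookupContext structs
 * below therefore carry per-sequence match data: [0] backtrack, [1] input,
 * [2] lookahead.
 */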
2078struct ChainContextClosureLookupContext
2079{
2080 ContextClosureFuncs funcs;
2081 const void *intersects_data[3];
2082};
2083
2084struct ChainContextCollectGlyphsLookupContext
2085{
2086 ContextCollectGlyphsFuncs funcs;
2087 const void *collect_data[3];
2088};
2089
2090struct ChainContextApplyLookupContext
2091{
2092 ContextApplyFuncs funcs;
2093 const void *match_data[3];
2094};
2095
2096static inline bool chain_context_intersects (const hb_set_t *glyphs,
2097 unsigned int backtrackCount,
2098 const HBUINT16 backtrack[],
2099 unsigned int inputCount, /* Including the first glyph (not matched) */
2100 const HBUINT16 input[], /* Array of input values--start with second glyph */
2101 unsigned int lookaheadCount,
2102 const HBUINT16 lookahead[],
2103 ChainContextClosureLookupContext &lookup_context)
2104{
2105 return array_is_subset_of (glyphs,
2106 backtrackCount, backtrack,
2107 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
2108 && array_is_subset_of (glyphs,
2109 inputCount ? inputCount - 1 : 0, input,
2110 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
2111 && array_is_subset_of (glyphs,
2112 lookaheadCount, lookahead,
2113 lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
2114}
2115
2116static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2117 unsigned int backtrackCount,
2118 const HBUINT16 backtrack[],
2119 unsigned int inputCount, /* Including the first glyph (not matched) */
2120 const HBUINT16 input[], /* Array of input values--start with second glyph */
2121 unsigned int lookaheadCount,
2122 const HBUINT16 lookahead[],
2123 unsigned int lookupCount,
2124 const LookupRecord lookupRecord[],
2125 ChainContextClosureLookupContext &lookup_context)
2126{
2127 if (chain_context_intersects (c->glyphs,
2128 backtrackCount, backtrack,
2129 inputCount, input,
2130 lookaheadCount, lookahead,
2131 lookup_context))
2132 recurse_lookups (c,
2133 lookupCount, lookupRecord);
2134}
2135
2136static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
2137 unsigned int backtrackCount,
2138 const HBUINT16 backtrack[],
2139 unsigned int inputCount, /* Including the first glyph (not matched) */
2140 const HBUINT16 input[], /* Array of input values--start with second glyph */
2141 unsigned int lookaheadCount,
2142 const HBUINT16 lookahead[],
2143 unsigned int lookupCount,
2144 const LookupRecord lookupRecord[],
2145 ChainContextCollectGlyphsLookupContext &lookup_context)
2146{
2147 collect_array (c, c->before,
2148 backtrackCount, backtrack,
2149 lookup_context.funcs.collect, lookup_context.collect_data[0]);
2150 collect_array (c, c->input,
2151 inputCount ? inputCount - 1 : 0, input,
2152 lookup_context.funcs.collect, lookup_context.collect_data[1]);
2153 collect_array (c, c->after,
2154 lookaheadCount, lookahead,
2155 lookup_context.funcs.collect, lookup_context.collect_data[2]);
2156 recurse_lookups (c,
2157 lookupCount, lookupRecord);
2158}
2159
2160static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2161 unsigned int backtrackCount,
2162 const HBUINT16 backtrack[] HB_UNUSED,
2163 unsigned int inputCount, /* Including the first glyph (not matched) */
2164 const HBUINT16 input[], /* Array of input values--start with second glyph */
2165 unsigned int lookaheadCount,
2166 const HBUINT16 lookahead[] HB_UNUSED,
2167 unsigned int lookupCount HB_UNUSED,
2168 const LookupRecord lookupRecord[] HB_UNUSED,
2169 ChainContextApplyLookupContext &lookup_context)
2170{
2171 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2172 && would_match_input (c,
2173 inputCount, input,
2174 lookup_context.funcs.match, lookup_context.match_data[1]);
2175}
2176
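/*
 * Matching order below: the input sequence is matched first at the current
 * buffer position, then the backtrack sequence before it, then the lookahead
 * sequence after it.  Only when all three match is the span flagged
 * unsafe-to-break and the nested lookups applied at the recorded positions.
 */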
2177static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
2178 unsigned int backtrackCount,
2179 const HBUINT16 backtrack[],
2180 unsigned int inputCount, /* Including the first glyph (not matched) */
2181 const HBUINT16 input[], /* Array of input values--start with second glyph */
2182 unsigned int lookaheadCount,
2183 const HBUINT16 lookahead[],
2184 unsigned int lookupCount,
2185 const LookupRecord lookupRecord[],
2186 ChainContextApplyLookupContext &lookup_context)
2187{
2188 unsigned int start_index = 0, match_length = 0, end_index = 0;
2189 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
2190 return match_input (c,
2191 inputCount, input,
2192 lookup_context.funcs.match, lookup_context.match_data[1],
2193 &match_length, match_positions)
2194 && match_backtrack (c,
2195 backtrackCount, backtrack,
2196 lookup_context.funcs.match, lookup_context.match_data[0],
2197 &start_index)
2198 && match_lookahead (c,
2199 lookaheadCount, lookahead,
2200 lookup_context.funcs.match, lookup_context.match_data[2],
2201 match_length, &end_index)
2202 && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index),
2203 apply_lookup (c,
2204 inputCount, match_positions,
2205 lookupCount, lookupRecord,
2206 match_length));
2207}
2208
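/*
 * ChainRule: a single chaining rule.  It is stored as four variable-length
 * arrays laid out back to back -- backtrack, input (first glyph implied),
 * lookahead, then the LookupRecords -- so only the first array has a fixed
 * offset; the others are located with StructAfter in the accessors below.
 */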
2209struct ChainRule
2210{
2211 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2212 {
2213 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2214 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2215 return chain_context_intersects (glyphs,
2216 backtrack.len, backtrack.arrayZ,
2217 input.lenP1, input.arrayZ,
2218 lookahead.len, lookahead.arrayZ,
2219 lookup_context);
2220 }
2221
2222 void closure (hb_closure_context_t *c,
2223 ChainContextClosureLookupContext &lookup_context) const
2224 {
2225 if (unlikely (c->lookup_limit_exceeded ())) return;
2226
2227 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2228 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2229 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2230 chain_context_closure_lookup (c,
2231 backtrack.len, backtrack.arrayZ,
2232 input.lenP1, input.arrayZ,
2233 lookahead.len, lookahead.arrayZ,
2234 lookup.len, lookup.arrayZ,
2235 lookup_context);
2236 }
2237
2238 void closure_lookups (hb_closure_lookups_context_t *c) const
2239 {
2240 if (unlikely (c->lookup_limit_exceeded ())) return;
2241
2242 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2243 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2244 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2245 recurse_lookups (c, lookup.len, lookup.arrayZ);
2246 }
2247
2248 void collect_glyphs (hb_collect_glyphs_context_t *c,
2249 ChainContextCollectGlyphsLookupContext &lookup_context) const
2250 {
2251 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2252 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2253 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2254 chain_context_collect_glyphs_lookup (c,
2255 backtrack.len, backtrack.arrayZ,
2256 input.lenP1, input.arrayZ,
2257 lookahead.len, lookahead.arrayZ,
2258 lookup.len, lookup.arrayZ,
2259 lookup_context);
2260 }
2261
2262 bool would_apply (hb_would_apply_context_t *c,
2263 ChainContextApplyLookupContext &lookup_context) const
2264 {
2265 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2266 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2267 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2268 return chain_context_would_apply_lookup (c,
2269 backtrack.len, backtrack.arrayZ,
2270 input.lenP1, input.arrayZ,
2271 lookahead.len, lookahead.arrayZ, lookup.len,
2272 lookup.arrayZ, lookup_context);
2273 }
2274
2275 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2276 {
2277 TRACE_APPLY (this);
2278 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2279 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2280 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2281 return_trace (chain_context_apply_lookup (c,
2282 backtrack.len, backtrack.arrayZ,
2283 input.lenP1, input.arrayZ,
2284 lookahead.len, lookahead.arrayZ, lookup.len,
2285 lookup.arrayZ, lookup_context));
2286 }
2287
2288 template<typename Iterator,
2289 hb_requires (hb_is_iterator (Iterator))>
2290 void serialize_array (hb_serialize_context_t *c,
2291 HBUINT16 len,
2292 Iterator it) const
2293 {
2294 c->copy (len);
2295 for (const auto g : it)
2296 {
2297 HBUINT16 gid;
2298 gid = g;
2299 c->copy (gid);
2300 }
2301 }
2302
2303 ChainRule* copy (hb_serialize_context_t *c,
2304 const hb_map_t *lookup_map,
2305 const hb_map_t *backtrack_map,
2306 const hb_map_t *input_map = nullptr,
2307 const hb_map_t *lookahead_map = nullptr) const
2308 {
2309 TRACE_SERIALIZE (this);
2310 auto *out = c->start_embed (this);
2311 if (unlikely (!out)) return_trace (nullptr);
2312
2313 const hb_map_t *mapping = backtrack_map;
2314 serialize_array (c, backtrack.len, + backtrack.iter ()
2315 | hb_map (mapping));
2316
2317 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2318 if (input_map) mapping = input_map;
2319 serialize_array (c, input.lenP1, + input.iter ()
2320 | hb_map (mapping));
2321
2322 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2323 if (lookahead_map) mapping = lookahead_map;
2324 serialize_array (c, lookahead.len, + lookahead.iter ()
2325 | hb_map (mapping));
2326
2327 const ArrayOf<LookupRecord> &lookupRecord = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2328 HBUINT16 lookupCount;
2329 lookupCount = lookupRecord.len;
2330 if (!c->copy (lookupCount)) return_trace (nullptr);
2331
2332 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
2333 if (!c->copy (lookupRecord[i], lookup_map)) return_trace (nullptr);
2334
2335 return_trace (out);
2336 }
2337
2338 bool subset (hb_subset_context_t *c,
2339 const hb_map_t *lookup_map,
2340 const hb_map_t *backtrack_map = nullptr,
2341 const hb_map_t *input_map = nullptr,
2342 const hb_map_t *lookahead_map = nullptr) const
2343 {
2344 TRACE_SUBSET (this);
2345
2346 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2347 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2348
2349 if (!backtrack_map)
2350 {
2351 const hb_set_t &glyphset = *c->plan->glyphset ();
2352 if (!hb_all (backtrack, glyphset) ||
2353 !hb_all (input, glyphset) ||
2354 !hb_all (lookahead, glyphset))
2355 return_trace (false);
2356
2357 copy (c->serializer, lookup_map, c->plan->glyph_map);
2358 }
2359 else
2360 {
2361 if (!hb_all (backtrack, backtrack_map) ||
2362 !hb_all (input, input_map) ||
2363 !hb_all (lookahead, lookahead_map))
2364 return_trace (false);
2365
2366 copy (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
2367 }
2368
2369 return_trace (true);
2370 }
2371
2372 bool sanitize (hb_sanitize_context_t *c) const
2373 {
2374 TRACE_SANITIZE (this);
2375 if (!backtrack.sanitize (c)) return_trace (false);
2376 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2377 if (!input.sanitize (c)) return_trace (false);
2378 const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
2379 if (!lookahead.sanitize (c)) return_trace (false);
2380 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2381 return_trace (lookup.sanitize (c));
2382 }
2383
2384 protected:
2385 ArrayOf<HBUINT16>
2386 backtrack; /* Array of backtracking values
2387 * (to be matched before the input
2388 * sequence) */
2389 HeadlessArrayOf<HBUINT16>
2390 inputX; /* Array of input values (start with
2391 * second glyph) */
2392 ArrayOf<HBUINT16>
 2393 lookaheadX; /* Array of lookahead values (to be
2394 * matched after the input sequence) */
2395 ArrayOf<LookupRecord>
2396 lookupX; /* Array of LookupRecords--in
 2397 * design order */
2398 public:
2399 DEFINE_SIZE_MIN (8);
2400};
2401
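/*
 * ChainRuleSet: all chaining rules that share the same first-glyph slot
 * (Coverage index or input class).  Rules are tried in order and the first
 * one that applies wins.
 */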
2402struct ChainRuleSet
2403{
2404 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2405 {
2406 return
2407 + hb_iter (rule)
2408 | hb_map (hb_add (this))
2409 | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
2410 | hb_any
2411 ;
2412 }
2413 void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
2414 {
2415 if (unlikely (c->lookup_limit_exceeded ())) return;
2416
2417 return
2418 + hb_iter (rule)
2419 | hb_map (hb_add (this))
2420 | hb_apply ([&] (const ChainRule &_) { _.closure (c, lookup_context); })
2421 ;
2422 }
2423
2424 void closure_lookups (hb_closure_lookups_context_t *c) const
2425 {
2426 if (unlikely (c->lookup_limit_exceeded ())) return;
2427
2428 return
2429 + hb_iter (rule)
2430 | hb_map (hb_add (this))
2431 | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c); })
2432 ;
2433 }
2434
2435 void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
2436 {
2437 return
2438 + hb_iter (rule)
2439 | hb_map (hb_add (this))
2440 | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
2441 ;
2442 }
2443
2444 bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2445 {
2446 return
2447 + hb_iter (rule)
2448 | hb_map (hb_add (this))
2449 | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
2450 | hb_any
2451 ;
2452 }
2453
2454 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2455 {
2456 TRACE_APPLY (this);
2457 return_trace (
2458 + hb_iter (rule)
2459 | hb_map (hb_add (this))
2460 | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
2461 | hb_any
2462 )
2463 ;
2464 }
2465
2466 bool subset (hb_subset_context_t *c,
2467 const hb_map_t *lookup_map,
2468 const hb_map_t *backtrack_klass_map = nullptr,
2469 const hb_map_t *input_klass_map = nullptr,
2470 const hb_map_t *lookahead_klass_map = nullptr) const
2471 {
2472 TRACE_SUBSET (this);
2473
2474 auto snap = c->serializer->snapshot ();
2475 auto *out = c->serializer->start_embed (*this);
2476 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2477
2478 for (const OffsetTo<ChainRule>& _ : rule)
2479 {
2480 if (!_) continue;
2481 auto *o = out->rule.serialize_append (c->serializer);
2482 if (unlikely (!o)) continue;
2483
2484 auto o_snap = c->serializer->snapshot ();
2485 if (!o->serialize_subset (c, _, this,
2486 lookup_map,
2487 backtrack_klass_map,
2488 input_klass_map,
2489 lookahead_klass_map))
2490 {
2491 out->rule.pop ();
2492 c->serializer->revert (o_snap);
2493 }
2494 }
2495
2496 bool ret = bool (out->rule);
2497 if (!ret) c->serializer->revert (snap);
2498
2499 return_trace (ret);
2500 }
2501
2502 bool sanitize (hb_sanitize_context_t *c) const
2503 {
2504 TRACE_SANITIZE (this);
2505 return_trace (rule.sanitize (c, this));
2506 }
2507
2508 protected:
2509 OffsetArrayOf<ChainRule>
2510 rule; /* Array of ChainRule tables
2511 * ordered by preference */
2512 public:
2513 DEFINE_SIZE_ARRAY (2, rule);
2514};
2515
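/*
 * ChainContextFormat1: glyph-based chaining context.  ruleSet runs parallel
 * to the Coverage table; rules list plain glyph ids.
 */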
2516struct ChainContextFormat1
2517{
2518 bool intersects (const hb_set_t *glyphs) const
2519 {
2520 struct ChainContextClosureLookupContext lookup_context = {
2521 {intersects_glyph},
2522 {nullptr, nullptr, nullptr}
2523 };
2524
2525 return
2526 + hb_zip (this+coverage, ruleSet)
2527 | hb_filter (*glyphs, hb_first)
2528 | hb_map (hb_second)
2529 | hb_map (hb_add (this))
2530 | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
2531 | hb_any
2532 ;
2533 }
2534
2535 void closure (hb_closure_context_t *c) const
2536 {
2537 struct ChainContextClosureLookupContext lookup_context = {
2538 {intersects_glyph},
2539 {nullptr, nullptr, nullptr}
2540 };
2541
2542 + hb_zip (this+coverage, ruleSet)
2543 | hb_filter (*c->glyphs, hb_first)
2544 | hb_map (hb_second)
2545 | hb_map (hb_add (this))
2546 | hb_apply ([&] (const ChainRuleSet &_) { _.closure (c, lookup_context); })
2547 ;
2548 }
2549
2550 void closure_lookups (hb_closure_lookups_context_t *c) const
2551 {
2552 + hb_iter (ruleSet)
2553 | hb_map (hb_add (this))
2554 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c); })
2555 ;
2556 }
2557
2558 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2559
2560 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2561 {
2562 (this+coverage).collect_coverage (c->input);
2563
2564 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2565 {collect_glyph},
2566 {nullptr, nullptr, nullptr}
2567 };
2568
2569 + hb_iter (ruleSet)
2570 | hb_map (hb_add (this))
2571 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
2572 ;
2573 }
2574
2575 bool would_apply (hb_would_apply_context_t *c) const
2576 {
2577 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2578 struct ChainContextApplyLookupContext lookup_context = {
2579 {match_glyph},
2580 {nullptr, nullptr, nullptr}
2581 };
2582 return rule_set.would_apply (c, lookup_context);
2583 }
2584
2585 const Coverage &get_coverage () const { return this+coverage; }
2586
2587 bool apply (hb_ot_apply_context_t *c) const
2588 {
2589 TRACE_APPLY (this);
2590 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2591 if (likely (index == NOT_COVERED)) return_trace (false);
2592
2593 const ChainRuleSet &rule_set = this+ruleSet[index];
2594 struct ChainContextApplyLookupContext lookup_context = {
2595 {match_glyph},
2596 {nullptr, nullptr, nullptr}
2597 };
2598 return_trace (rule_set.apply (c, lookup_context));
2599 }
2600
2601 bool subset (hb_subset_context_t *c) const
2602 {
2603 TRACE_SUBSET (this);
2604 const hb_set_t &glyphset = *c->plan->glyphset ();
2605 const hb_map_t &glyph_map = *c->plan->glyph_map;
2606
2607 auto *out = c->serializer->start_embed (*this);
2608 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2609 out->format = format;
2610
2611 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2612 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2613 + hb_zip (this+coverage, ruleSet)
2614 | hb_filter (glyphset, hb_first)
2615 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2616 | hb_map (hb_first)
2617 | hb_map (glyph_map)
2618 | hb_sink (new_coverage)
2619 ;
2620
2621 out->coverage.serialize (c->serializer, out)
2622 .serialize (c->serializer, new_coverage.iter ());
2623 return_trace (bool (new_coverage));
2624 }
2625
2626 bool sanitize (hb_sanitize_context_t *c) const
2627 {
2628 TRACE_SANITIZE (this);
2629 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
2630 }
2631
2632 protected:
2633 HBUINT16 format; /* Format identifier--format = 1 */
2634 OffsetTo<Coverage>
2635 coverage; /* Offset to Coverage table--from
2636 * beginning of table */
2637 OffsetArrayOf<ChainRuleSet>
2638 ruleSet; /* Array of ChainRuleSet tables
2639 * ordered by Coverage Index */
2640 public:
2641 DEFINE_SIZE_ARRAY (6, ruleSet);
2642};
2643
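/*
 * ChainContextFormat2: class-based chaining context, with separate ClassDefs
 * for the backtrack, input and lookahead sequences; ruleSet is indexed by the
 * input class of the first glyph.
 */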
2644struct ChainContextFormat2
2645{
2646 bool intersects (const hb_set_t *glyphs) const
2647 {
2648 if (!(this+coverage).intersects (glyphs))
2649 return false;
2650
2651 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2652 const ClassDef &input_class_def = this+inputClassDef;
2653 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2654
2655 struct ChainContextClosureLookupContext lookup_context = {
2656 {intersects_class},
2657 {&backtrack_class_def,
2658 &input_class_def,
2659 &lookahead_class_def}
2660 };
2661
2662 return
2663 + hb_iter (ruleSet)
2664 | hb_map (hb_add (this))
2665 | hb_enumerate
2666 | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
2667 { return input_class_def.intersects_class (glyphs, p.first) &&
2668 p.second.intersects (glyphs, lookup_context); })
2669 | hb_any
2670 ;
2671 }
2672 void closure (hb_closure_context_t *c) const
2673 {
2674 if (!(this+coverage).intersects (c->glyphs))
2675 return;
2676
2677 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2678 const ClassDef &input_class_def = this+inputClassDef;
2679 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2680
2681 struct ChainContextClosureLookupContext lookup_context = {
2682 {intersects_class},
2683 {&backtrack_class_def,
2684 &input_class_def,
2685 &lookahead_class_def}
2686 };
2687
2688 return
2689 + hb_enumerate (ruleSet)
2690 | hb_filter ([&] (unsigned _)
2691 { return input_class_def.intersects_class (c->glyphs, _); },
2692 hb_first)
2693 | hb_map (hb_second)
2694 | hb_map (hb_add (this))
2695 | hb_apply ([&] (const ChainRuleSet &_) { _.closure (c, lookup_context); })
2696 ;
2697 }
2698
2699 void closure_lookups (hb_closure_lookups_context_t *c) const
2700 {
2701 + hb_iter (ruleSet)
2702 | hb_map (hb_add (this))
2703 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c); })
2704 ;
2705 }
2706
2707 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2708
2709 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2710 {
2711 (this+coverage).collect_coverage (c->input);
2712
2713 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2714 const ClassDef &input_class_def = this+inputClassDef;
2715 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2716
2717 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2718 {collect_class},
2719 {&backtrack_class_def,
2720 &input_class_def,
2721 &lookahead_class_def}
2722 };
2723
2724 + hb_iter (ruleSet)
2725 | hb_map (hb_add (this))
2726 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
2727 ;
2728 }
2729
2730 bool would_apply (hb_would_apply_context_t *c) const
2731 {
2732 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2733 const ClassDef &input_class_def = this+inputClassDef;
2734 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2735
2736 unsigned int index = input_class_def.get_class (c->glyphs[0]);
2737 const ChainRuleSet &rule_set = this+ruleSet[index];
2738 struct ChainContextApplyLookupContext lookup_context = {
2739 {match_class},
2740 {&backtrack_class_def,
2741 &input_class_def,
2742 &lookahead_class_def}
2743 };
2744 return rule_set.would_apply (c, lookup_context);
2745 }
2746
2747 const Coverage &get_coverage () const { return this+coverage; }
2748
2749 bool apply (hb_ot_apply_context_t *c) const
2750 {
2751 TRACE_APPLY (this);
2752 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2753 if (likely (index == NOT_COVERED)) return_trace (false);
2754
2755 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2756 const ClassDef &input_class_def = this+inputClassDef;
2757 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2758
2759 index = input_class_def.get_class (c->buffer->cur().codepoint);
2760 const ChainRuleSet &rule_set = this+ruleSet[index];
2761 struct ChainContextApplyLookupContext lookup_context = {
2762 {match_class},
2763 {&backtrack_class_def,
2764 &input_class_def,
2765 &lookahead_class_def}
2766 };
2767 return_trace (rule_set.apply (c, lookup_context));
2768 }
2769
2770 bool subset (hb_subset_context_t *c) const
2771 {
2772 TRACE_SUBSET (this);
2773 auto *out = c->serializer->start_embed (*this);
2774 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2775 out->format = format;
2776 out->coverage.serialize_subset (c, coverage, this);
2777
2778 hb_map_t backtrack_klass_map;
2779 out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
2780
 2781 // Subset inputClassDef based on the glyphs that survived Coverage subsetting.
2782 hb_map_t input_klass_map;
2783 out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
2784
2785 hb_map_t lookahead_klass_map;
2786 out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
2787
2788 unsigned non_zero_index = 0, index = 0;
2789 bool ret = true;
2790 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2791 for (const OffsetTo<ChainRuleSet>& _ : + hb_enumerate (ruleSet)
2792 | hb_filter (input_klass_map, hb_first)
2793 | hb_map (hb_second))
2794 {
2795 auto *o = out->ruleSet.serialize_append (c->serializer);
2796 if (unlikely (!o))
2797 {
2798 ret = false;
2799 break;
2800 }
2801 if (o->serialize_subset (c, _, this,
2802 lookup_map,
2803 &backtrack_klass_map,
2804 &input_klass_map,
2805 &lookahead_klass_map))
2806 non_zero_index = index;
2807
2808 index++;
2809 }
2810
2811 if (!ret) return_trace (ret);
2812
 2813 // Prune empty trailing ruleSets.
2814 --index;
2815 while (index > non_zero_index)
2816 {
2817 out->ruleSet.pop ();
2818 index--;
2819 }
2820
2821 return_trace (bool (out->ruleSet));
2822 }
2823
2824 bool sanitize (hb_sanitize_context_t *c) const
2825 {
2826 TRACE_SANITIZE (this);
2827 return_trace (coverage.sanitize (c, this) &&
2828 backtrackClassDef.sanitize (c, this) &&
2829 inputClassDef.sanitize (c, this) &&
2830 lookaheadClassDef.sanitize (c, this) &&
2831 ruleSet.sanitize (c, this));
2832 }
2833
2834 protected:
2835 HBUINT16 format; /* Format identifier--format = 2 */
2836 OffsetTo<Coverage>
2837 coverage; /* Offset to Coverage table--from
2838 * beginning of table */
2839 OffsetTo<ClassDef>
2840 backtrackClassDef; /* Offset to glyph ClassDef table
2841 * containing backtrack sequence
2842 * data--from beginning of table */
2843 OffsetTo<ClassDef>
2844 inputClassDef; /* Offset to glyph ClassDef
2845 * table containing input sequence
2846 * data--from beginning of table */
2847 OffsetTo<ClassDef>
2848 lookaheadClassDef; /* Offset to glyph ClassDef table
2849 * containing lookahead sequence
2850 * data--from beginning of table */
2851 OffsetArrayOf<ChainRuleSet>
2852 ruleSet; /* Array of ChainRuleSet tables
2853 * ordered by class */
2854 public:
2855 DEFINE_SIZE_ARRAY (12, ruleSet);
2856};
2857
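/*
 * ChainContextFormat3: coverage-based chaining context.  A single rule whose
 * backtrack, input and lookahead positions each reference their own Coverage
 * table; the LookupRecords follow the three coverage arrays in memory.
 */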
2858struct ChainContextFormat3
2859{
2860 bool intersects (const hb_set_t *glyphs) const
2861 {
2862 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2863
2864 if (!(this+input[0]).intersects (glyphs))
2865 return false;
2866
2867 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2868 struct ChainContextClosureLookupContext lookup_context = {
2869 {intersects_coverage},
2870 {this, this, this}
2871 };
2872 return chain_context_intersects (glyphs,
2873 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2874 input.len, (const HBUINT16 *) input.arrayZ + 1,
2875 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2876 lookup_context);
2877 }
2878
2879 void closure (hb_closure_context_t *c) const
2880 {
2881 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2882
2883 if (!(this+input[0]).intersects (c->glyphs))
2884 return;
2885
2886 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2887 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2888 struct ChainContextClosureLookupContext lookup_context = {
2889 {intersects_coverage},
2890 {this, this, this}
2891 };
2892 chain_context_closure_lookup (c,
2893 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2894 input.len, (const HBUINT16 *) input.arrayZ + 1,
2895 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2896 lookup.len, lookup.arrayZ,
2897 lookup_context);
2898 }
2899
2900 void closure_lookups (hb_closure_lookups_context_t *c) const
2901 {
2902 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2903 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2904 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2905 recurse_lookups (c, lookup.len, lookup.arrayZ);
2906 }
2907
2908 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2909
2910 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2911 {
2912 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2913
2914 (this+input[0]).collect_coverage (c->input);
2915
2916 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2917 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2918 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2919 {collect_coverage},
2920 {this, this, this}
2921 };
2922 chain_context_collect_glyphs_lookup (c,
2923 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2924 input.len, (const HBUINT16 *) input.arrayZ + 1,
2925 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2926 lookup.len, lookup.arrayZ,
2927 lookup_context);
2928 }
2929
2930 bool would_apply (hb_would_apply_context_t *c) const
2931 {
2932 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2933 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2934 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2935 struct ChainContextApplyLookupContext lookup_context = {
2936 {match_coverage},
2937 {this, this, this}
2938 };
2939 return chain_context_would_apply_lookup (c,
2940 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2941 input.len, (const HBUINT16 *) input.arrayZ + 1,
2942 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2943 lookup.len, lookup.arrayZ, lookup_context);
2944 }
2945
2946 const Coverage &get_coverage () const
2947 {
2948 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2949 return this+input[0];
2950 }
2951
2952 bool apply (hb_ot_apply_context_t *c) const
2953 {
2954 TRACE_APPLY (this);
2955 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
2956
2957 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
2958 if (likely (index == NOT_COVERED)) return_trace (false);
2959
2960 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
2961 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
2962 struct ChainContextApplyLookupContext lookup_context = {
2963 {match_coverage},
2964 {this, this, this}
2965 };
2966 return_trace (chain_context_apply_lookup (c,
2967 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
2968 input.len, (const HBUINT16 *) input.arrayZ + 1,
2969 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
2970 lookup.len, lookup.arrayZ, lookup_context));
2971 }
2972
2973 template<typename Iterator,
2974 hb_requires (hb_is_iterator (Iterator))>
2975 bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
2976 {
2977 TRACE_SERIALIZE (this);
2978 auto *out = c->serializer->start_embed<OffsetArrayOf<Coverage>> ();
2979
2980 if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size))) return_trace (false);
2981
2982 + it
2983 | hb_apply (subset_offset_array (c, *out, base))
2984 ;
2985
2986 return_trace (out->len);
2987 }
2988
2989 bool subset (hb_subset_context_t *c) const
2990 {
2991 TRACE_SUBSET (this);
2992
2993 auto *out = c->serializer->start_embed (this);
2994 if (unlikely (!out)) return_trace (false);
2995 if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
2996
2997 if (!serialize_coverage_offsets (c, backtrack.iter (), this))
2998 return_trace (false);
2999
3000 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
3001 if (!serialize_coverage_offsets (c, input.iter (), this))
3002 return_trace (false);
3003
3004 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
3005 if (!serialize_coverage_offsets (c, lookahead.iter (), this))
3006 return_trace (false);
3007
3008 const ArrayOf<LookupRecord> &lookupRecord = StructAfter<ArrayOf<LookupRecord>> (lookahead);
3009 HBUINT16 lookupCount;
3010 lookupCount = lookupRecord.len;
3011 if (!c->serializer->copy (lookupCount)) return_trace (false);
3012
3013 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3014 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
3015 if (!c->serializer->copy (lookupRecord[i], lookup_map)) return_trace (false);
3016
3017 return_trace (true);
3018 }
3019
3020 bool sanitize (hb_sanitize_context_t *c) const
3021 {
3022 TRACE_SANITIZE (this);
3023 if (!backtrack.sanitize (c, this)) return_trace (false);
3024 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
3025 if (!input.sanitize (c, this)) return_trace (false);
3026 if (!input.len) return_trace (false); /* To be consistent with Context. */
3027 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
3028 if (!lookahead.sanitize (c, this)) return_trace (false);
3029 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
3030 return_trace (lookup.sanitize (c));
3031 }
3032
3033 protected:
3034 HBUINT16 format; /* Format identifier--format = 3 */
3035 OffsetArrayOf<Coverage>
3036 backtrack; /* Array of coverage tables
3037 * in backtracking sequence, in glyph
3038 * sequence order */
3039 OffsetArrayOf<Coverage>
 3040 inputX; /* Array of coverage
3041 * tables in input sequence, in glyph
3042 * sequence order */
3043 OffsetArrayOf<Coverage>
3044 lookaheadX; /* Array of coverage tables
3045 * in lookahead sequence, in glyph
3046 * sequence order */
3047 ArrayOf<LookupRecord>
3048 lookupX; /* Array of LookupRecords--in
 3049 * design order */
3050 public:
3051 DEFINE_SIZE_MIN (10);
3052};
3053
3054struct ChainContext
3055{
3056 template <typename context_t, typename ...Ts>
3057 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3058 {
3059 TRACE_DISPATCH (this, u.format);
3060 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3061 switch (u.format) {
3062 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
3063 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
3064 case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
3065 default:return_trace (c->default_return_value ());
3066 }
3067 }
3068
3069 protected:
3070 union {
3071 HBUINT16 format; /* Format identifier */
3072 ChainContextFormat1 format1;
3073 ChainContextFormat2 format2;
3074 ChainContextFormat3 format3;
3075 } u;
3076};
3077
3078
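/*
 * Extension subtables (GSUB lookup type 7, GPOS lookup type 9): a thin
 * wrapper that stores the real lookup type plus a 32-bit offset to the real
 * subtable, letting large fonts place subtables beyond the reach of 16-bit
 * offsets.  Nested Extension subtables are rejected in sanitize().
 */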
3079template <typename T>
3080struct ExtensionFormat1
3081{
3082 unsigned int get_type () const { return extensionLookupType; }
3083
3084 template <typename X>
3085 const X& get_subtable () const
3086 { return this + reinterpret_cast<const LOffsetTo<typename T::SubTable> &> (extensionOffset); }
3087
3088 template <typename context_t, typename ...Ts>
3089 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3090 {
3091 TRACE_DISPATCH (this, format);
3092 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
3093 return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), hb_forward<Ts> (ds)...));
3094 }
3095
3096 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
3097 { dispatch (c); }
3098
3099 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
3100 bool sanitize (hb_sanitize_context_t *c) const
3101 {
3102 TRACE_SANITIZE (this);
3103 return_trace (c->check_struct (this) &&
3104 extensionLookupType != T::SubTable::Extension);
3105 }
3106
3107 protected:
3108 HBUINT16 format; /* Format identifier. Set to 1. */
3109 HBUINT16 extensionLookupType; /* Lookup type of subtable referenced
3110 * by ExtensionOffset (i.e. the
3111 * extension subtable). */
 3112 Offset32 extensionOffset; /* Offset to the extension subtable,
 3113 * of lookup type extensionLookupType, from start of this subtable. */
3114 public:
3115 DEFINE_SIZE_STATIC (8);
3116};
3117
3118template <typename T>
3119struct Extension
3120{
3121 unsigned int get_type () const
3122 {
3123 switch (u.format) {
3124 case 1: return u.format1.get_type ();
3125 default:return 0;
3126 }
3127 }
3128 template <typename X>
3129 const X& get_subtable () const
3130 {
3131 switch (u.format) {
3132 case 1: return u.format1.template get_subtable<typename T::SubTable> ();
3133 default:return Null (typename T::SubTable);
3134 }
3135 }
3136
3137 template <typename context_t, typename ...Ts>
3138 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3139 {
3140 TRACE_DISPATCH (this, u.format);
3141 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3142 switch (u.format) {
3143 case 1: return_trace (u.format1.dispatch (c, hb_forward<Ts> (ds)...));
3144 default:return_trace (c->default_return_value ());
3145 }
3146 }
3147
3148 protected:
3149 union {
3150 HBUINT16 format; /* Format identifier */
3151 ExtensionFormat1<T> format1;
3152 } u;
3153};
3154
3155
3156/*
3157 * GSUB/GPOS Common
3158 */
3159
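/*
 * Per-lookup accelerator: a set digest of every glyph the lookup can touch
 * (so whole lookups can be skipped cheaply via may_have()) plus a flattened
 * array of the lookup's subtables for fast apply().
 */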
3160struct hb_ot_layout_lookup_accelerator_t
3161{
3162 template <typename TLookup>
3163 void init (const TLookup &lookup)
3164 {
3165 digest.init ();
3166 lookup.collect_coverage (&digest);
3167
3168 subtables.init ();
3169 OT::hb_get_subtables_context_t c_get_subtables (subtables);
3170 lookup.dispatch (&c_get_subtables);
3171 }
3172 void fini () { subtables.fini (); }
3173
3174 bool may_have (hb_codepoint_t g) const
3175 { return digest.may_have (g); }
3176
3177 bool apply (hb_ot_apply_context_t *c) const
3178 {
3179 for (unsigned int i = 0; i < subtables.length; i++)
3180 if (subtables[i].apply (c))
3181 return true;
3182 return false;
3183 }
3184
3185 private:
3186 hb_set_digest_t digest;
3187 hb_get_subtables_context_t::array_t subtables;
3188};
3189
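/*
 * GSUBGPOS: the layout common to the GSUB and GPOS tables -- version,
 * ScriptList, FeatureList, LookupList and, from version 1.1 on, a
 * FeatureVariations table.  GSUB and GPOS (defined elsewhere) derive from
 * this struct.
 *
 * Illustrative sketch only, not library API: a caller holding a concrete
 * table type T that derives from GSUBGPOS would use the accelerator roughly
 * like
 *
 *   GSUBGPOS::accelerator_t<T> accel;
 *   accel.init (face);
 *   unsigned count = accel.table->get_lookup_count ();
 *   ...
 *   accel.fini ();
 */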
3190struct GSUBGPOS
3191{
3192 bool has_data () const { return version.to_int (); }
3193 unsigned int get_script_count () const
3194 { return (this+scriptList).len; }
3195 const Tag& get_script_tag (unsigned int i) const
3196 { return (this+scriptList).get_tag (i); }
3197 unsigned int get_script_tags (unsigned int start_offset,
3198 unsigned int *script_count /* IN/OUT */,
3199 hb_tag_t *script_tags /* OUT */) const
3200 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
3201 const Script& get_script (unsigned int i) const
3202 { return (this+scriptList)[i]; }
3203 bool find_script_index (hb_tag_t tag, unsigned int *index) const
3204 { return (this+scriptList).find_index (tag, index); }
3205
3206 unsigned int get_feature_count () const
3207 { return (this+featureList).len; }
3208 hb_tag_t get_feature_tag (unsigned int i) const
3209 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
3210 unsigned int get_feature_tags (unsigned int start_offset,
3211 unsigned int *feature_count /* IN/OUT */,
3212 hb_tag_t *feature_tags /* OUT */) const
3213 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
3214 const Feature& get_feature (unsigned int i) const
3215 { return (this+featureList)[i]; }
3216 bool find_feature_index (hb_tag_t tag, unsigned int *index) const
3217 { return (this+featureList).find_index (tag, index); }
3218
3219 unsigned int get_lookup_count () const
3220 { return (this+lookupList).len; }
3221 const Lookup& get_lookup (unsigned int i) const
3222 { return (this+lookupList)[i]; }
3223
3224 bool find_variations_index (const int *coords, unsigned int num_coords,
3225 unsigned int *index) const
3226 {
3227#ifdef HB_NO_VAR
3228 *index = FeatureVariations::NOT_FOUND_INDEX;
3229 return false;
3230#endif
3231 return (version.to_int () >= 0x00010001u ? this+featureVars : Null (FeatureVariations))
3232 .find_index (coords, num_coords, index);
3233 }
3234 const Feature& get_feature_variation (unsigned int feature_index,
3235 unsigned int variations_index) const
3236 {
3237#ifndef HB_NO_VAR
3238 if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
3239 version.to_int () >= 0x00010001u)
3240 {
3241 const Feature *feature = (this+featureVars).find_substitute (variations_index,
3242 feature_index);
3243 if (feature)
3244 return *feature;
3245 }
3246#endif
3247 return get_feature (feature_index);
3248 }
3249
3250 void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
3251 hb_set_t *lookup_indexes /* OUT */) const
3252 {
3253#ifndef HB_NO_VAR
3254 if (version.to_int () >= 0x00010001u)
3255 (this+featureVars).collect_lookups (feature_indexes, lookup_indexes);
3256#endif
3257 }
3258
3259 template <typename TLookup>
3260 void closure_lookups (hb_face_t *face,
3261 const hb_set_t *glyphs,
3262 hb_set_t *lookup_indexes /* IN/OUT */) const
3263 {
3264 hb_set_t visited_lookups, inactive_lookups;
3265 OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
3266
3267 for (unsigned lookup_index : + hb_iter (lookup_indexes))
3268 reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);
3269
3270 hb_set_union (lookup_indexes, &visited_lookups);
3271 hb_set_subtract (lookup_indexes, &inactive_lookups);
3272 }
3273
3274 template <typename TLookup>
3275 bool subset (hb_subset_layout_context_t *c) const
3276 {
3277 TRACE_SUBSET (this);
3278 auto *out = c->subset_context->serializer->embed (*this);
3279 if (unlikely (!out)) return_trace (false);
3280
3281 typedef LookupOffsetList<TLookup> TLookupList;
3282 reinterpret_cast<OffsetTo<TLookupList> &> (out->lookupList)
3283 .serialize_subset (c->subset_context,
3284 reinterpret_cast<const OffsetTo<TLookupList> &> (lookupList),
3285 this,
3286 c);
3287
3288 reinterpret_cast<OffsetTo<RecordListOfFeature> &> (out->featureList)
3289 .serialize_subset (c->subset_context,
3290 reinterpret_cast<const OffsetTo<RecordListOfFeature> &> (featureList),
3291 this,
3292 c);
3293
3294 out->scriptList.serialize_subset (c->subset_context,
3295 scriptList,
3296 this,
3297 c);
3298
3299#ifndef HB_NO_VAR
3300 if (version.to_int () >= 0x00010001u)
3301 {
3302 bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
3303 if (!ret)
3304 {
3305 out->version.major = 1;
3306 out->version.minor = 0;
3307 }
3308 }
3309#endif
3310
3311 return_trace (true);
3312 }
3313
3314 void closure_features (const hb_map_t *lookup_indexes, /* IN */
3315 hb_set_t *feature_indexes /* OUT */) const
3316 {
3317 unsigned int feature_count = hb_min (get_feature_count (), (unsigned) HB_MAX_FEATURES);
3318 for (unsigned i = 0; i < feature_count; i++)
3319 {
3320 const Feature& f = get_feature (i);
3321 if ((!f.featureParams.is_null ()) || f.intersects_lookup_indexes (lookup_indexes))
3322 feature_indexes->add (i);
3323 }
3324#ifndef HB_NO_VAR
3325 if (version.to_int () >= 0x00010001u)
3326 (this+featureVars).closure_features (lookup_indexes, feature_indexes);
3327#endif
3328 }
3329
3330 unsigned int get_size () const
3331 {
3332 return min_size +
3333 (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
3334 }
3335
3336 template <typename TLookup>
3337 bool sanitize (hb_sanitize_context_t *c) const
3338 {
3339 TRACE_SANITIZE (this);
3340 typedef OffsetListOf<TLookup> TLookupList;
3341 if (unlikely (!(version.sanitize (c) &&
3342 likely (version.major == 1) &&
3343 scriptList.sanitize (c, this) &&
3344 featureList.sanitize (c, this) &&
3345 reinterpret_cast<const OffsetTo<TLookupList> &> (lookupList).sanitize (c, this))))
3346 return_trace (false);
3347
3348#ifndef HB_NO_VAR
3349 if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
3350 return_trace (false);
3351#endif
3352
3353 return_trace (true);
3354 }
3355
3356 template <typename T>
3357 struct accelerator_t
3358 {
3359 void init (hb_face_t *face)
3360 {
3361 this->table = hb_sanitize_context_t ().reference_table<T> (face);
3362 if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
3363 {
3364 hb_blob_destroy (this->table.get_blob ());
3365 this->table = hb_blob_get_empty ();
3366 }
3367
3368 this->lookup_count = table->get_lookup_count ();
3369
3370 this->accels = (hb_ot_layout_lookup_accelerator_t *) calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
3371 if (unlikely (!this->accels))
3372 this->lookup_count = 0;
3373
3374 for (unsigned int i = 0; i < this->lookup_count; i++)
3375 this->accels[i].init (table->get_lookup (i));
3376 }
3377
3378 void fini ()
3379 {
3380 for (unsigned int i = 0; i < this->lookup_count; i++)
3381 this->accels[i].fini ();
3382 free (this->accels);
3383 this->table.destroy ();
3384 }
3385
3386 hb_blob_ptr_t<T> table;
3387 unsigned int lookup_count;
3388 hb_ot_layout_lookup_accelerator_t *accels;
3389 };
3390
3391 protected:
 3392 FixedVersion<> version; /* Version of the GSUB/GPOS table--initially set
3393 * to 0x00010000u */
3394 OffsetTo<ScriptList>
3395 scriptList; /* ScriptList table */
3396 OffsetTo<FeatureList>
3397 featureList; /* FeatureList table */
3398 OffsetTo<LookupList>
3399 lookupList; /* LookupList table */
3400 LOffsetTo<FeatureVariations>
3401 featureVars; /* Offset to Feature Variations
 3402 * table--from beginning of table
3403 * (may be NULL). Introduced
3404 * in version 0x00010001. */
3405 public:
3406 DEFINE_SIZE_MIN (10);
3407};
3408
3409
3410} /* namespace OT */
3411
3412
3413#endif /* HB_OT_LAYOUT_GSUBGPOS_HH */
3414