/*
 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
 * Copyright © 2010,2012,2013 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUB_TABLE_HH
#define HB_OT_LAYOUT_GSUB_TABLE_HH

#include "hb-ot-layout-gsubgpos.hh"


namespace OT {

typedef hb_pair_t<hb_codepoint_t, hb_codepoint_t> hb_codepoint_pair_t;

template<typename Iterator>
static inline void SingleSubst_serialize (hb_serialize_context_t *c,
                                          Iterator it);

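/*
 * Single Substitution, Format 1: replaces each glyph covered by the Coverage
 * table with (glyph + deltaGlyphID) modulo 0x10000.
 */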
struct SingleSubstFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  void closure (hb_closure_context_t *c) const
  {
    unsigned d = deltaGlyphID;
    + hb_iter (this+coverage)
    | hb_filter (*c->glyphs)
    | hb_map ([d] (hb_codepoint_t g) { return (g + d) & 0xFFFFu; })
    | hb_sink (c->output)
    ;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+coverage).add_coverage (c->input))) return;
    unsigned d = deltaGlyphID;
    + hb_iter (this+coverage)
    | hb_map ([d] (hb_codepoint_t g) { return (g + d) & 0xFFFFu; })
    | hb_sink (c->output)
    ;
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool would_apply (hb_would_apply_context_t *c) const
  { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
    unsigned int index = (this+coverage).get_coverage (glyph_id);
    if (likely (index == NOT_COVERED)) return_trace (false);

    /* According to the Adobe Annotated OpenType Suite, result is always
     * limited to 16bit. */
    glyph_id = (glyph_id + deltaGlyphID) & 0xFFFFu;
    c->replace_glyph (glyph_id);

    return_trace (true);
  }

  template<typename Iterator,
           hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c,
                  Iterator glyphs,
                  unsigned delta)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
    c->check_assign (deltaGlyphID, delta);
    return_trace (true);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    hb_codepoint_t delta = deltaGlyphID;

    auto it =
    + hb_iter (this+coverage)
    | hb_filter (glyphset)
    | hb_map_retains_sorting ([&] (hb_codepoint_t g) {
                                return hb_codepoint_pair_t (g,
                                                            (g + delta) & 0xFFFF); })
    | hb_filter (glyphset, hb_second)
    | hb_map_retains_sorting ([&] (hb_codepoint_pair_t p) -> hb_codepoint_pair_t
                              { return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
    ;

    bool ret = bool (it);
    SingleSubst_serialize (c->serializer, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
  }

  protected:
  HBUINT16      format;             /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;           /* Offset to Coverage table--from
                                     * beginning of Substitution table */
  HBUINT16      deltaGlyphID;       /* Add to original GlyphID to get
                                     * substitute GlyphID, modulo 0x10000 */
  public:
  DEFINE_SIZE_STATIC (6);
};

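/*
 * Single Substitution, Format 2: replaces each covered glyph with the
 * substitute GlyphID stored at its Coverage Index.
 */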
struct SingleSubstFormat2
{
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  void closure (hb_closure_context_t *c) const
  {
    + hb_zip (this+coverage, substitute)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_sink (c->output)
    ;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+coverage).add_coverage (c->input))) return;
    + hb_zip (this+coverage, substitute)
    | hb_map (hb_second)
    | hb_sink (c->output)
    ;
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool would_apply (hb_would_apply_context_t *c) const
  { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    if (unlikely (index >= substitute.len)) return_trace (false);

    c->replace_glyph (substitute[index]);

    return_trace (true);
  }

  template<typename Iterator,
           hb_requires (hb_is_sorted_source_of (Iterator,
                                                hb_codepoint_pair_t))>
  bool serialize (hb_serialize_context_t *c,
                  Iterator it)
  {
    TRACE_SERIALIZE (this);
    auto substitutes =
    + it
    | hb_map (hb_second)
    ;
    auto glyphs =
    + it
    | hb_map_retains_sorting (hb_first)
    ;
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!substitute.serialize (c, substitutes))) return_trace (false);
    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
    return_trace (true);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto it =
    + hb_zip (this+coverage, substitute)
    | hb_filter (glyphset, hb_first)
    | hb_filter (glyphset, hb_second)
    | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const HBGlyphID &> p) -> hb_codepoint_pair_t
                              { return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
    ;

    bool ret = bool (it);
    SingleSubst_serialize (c->serializer, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && substitute.sanitize (c));
  }

  protected:
  HBUINT16      format;             /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;           /* Offset to Coverage table--from
                                     * beginning of Substitution table */
  ArrayOf<HBGlyphID>
                substitute;         /* Array of substitute
                                     * GlyphIDs--ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, substitute);
};

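/*
 * SingleSubst: format dispatcher for single substitution.  serialize ()
 * picks format 1 when all (from, to) pairs share one 16-bit delta, and
 * falls back to format 2 otherwise.
 */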
struct SingleSubst
{

  template<typename Iterator,
           hb_requires (hb_is_sorted_source_of (Iterator,
                                                const hb_codepoint_pair_t))>
  bool serialize (hb_serialize_context_t *c,
                  Iterator glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return_trace (false);
    unsigned format = 2;
    unsigned delta = 0;
    if (glyphs)
    {
      format = 1;
      auto get_delta = [=] (hb_codepoint_pair_t _) {
        return (unsigned) (_.second - _.first) & 0xFFFF;
      };
      delta = get_delta (*glyphs);
      if (!hb_all (++(+glyphs), delta, get_delta)) format = 2;
    }
    u.format = format;
    switch (u.format) {
    case 1: return_trace (u.format1.serialize (c,
                                               + glyphs
                                               | hb_map_retains_sorting (hb_first),
                                               delta));
    case 2: return_trace (u.format2.serialize (c, glyphs));
    default:return_trace (false);
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;     /* Format identifier */
  SingleSubstFormat1    format1;
  SingleSubstFormat2    format2;
  } u;
};

template<typename Iterator>
static inline void
SingleSubst_serialize (hb_serialize_context_t *c,
                       Iterator it)
{ c->start_embed<SingleSubst> ()->serialize (c, it); }

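/*
 * Sequence: the string of glyphs that a MultipleSubst expands one glyph
 * into; an empty Sequence deletes the glyph (Uniscribe compatibility).
 */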
struct Sequence
{
  bool intersects (const hb_set_t *glyphs) const
  { return hb_all (substitute, glyphs); }

  void closure (hb_closure_context_t *c) const
  { c->output->add_array (substitute.arrayZ, substitute.len); }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  { c->output->add_array (substitute.arrayZ, substitute.len); }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int count = substitute.len;

    /* Special-case to make it in-place and not consider this
     * as a "multiplied" substitution. */
    if (unlikely (count == 1))
    {
      c->replace_glyph (substitute.arrayZ[0]);
      return_trace (true);
    }
    /* Spec disallows this, but Uniscribe allows it.
     * https://github.com/harfbuzz/harfbuzz/issues/253 */
    else if (unlikely (count == 0))
    {
      c->buffer->delete_glyph ();
      return_trace (true);
    }

    unsigned int klass = _hb_glyph_info_is_ligature (&c->buffer->cur()) ?
                         HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH : 0;

    for (unsigned int i = 0; i < count; i++) {
      _hb_glyph_info_set_lig_props_for_component (&c->buffer->cur(), i);
      c->output_glyph_for_component (substitute.arrayZ[i], klass);
    }
    c->buffer->skip_glyph ();

    return_trace (true);
  }

  template <typename Iterator,
            hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c,
                  Iterator subst)
  {
    TRACE_SERIALIZE (this);
    return_trace (substitute.serialize (c, subst));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    if (!intersects (&glyphset)) return_trace (false);

    auto it =
    + hb_iter (substitute)
    | hb_map (glyph_map)
    ;

    auto *out = c->serializer->start_embed (*this);
    return_trace (out->serialize (c->serializer, it));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (substitute.sanitize (c));
  }

  protected:
  ArrayOf<HBGlyphID>
                substitute;         /* String of GlyphIDs to substitute */
  public:
  DEFINE_SIZE_ARRAY (2, substitute);
};

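/*
 * Multiple Substitution, Format 1: replaces one covered glyph with the
 * Sequence of glyphs at its Coverage Index.
 */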
struct MultipleSubstFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  void closure (hb_closure_context_t *c) const
  {
    + hb_zip (this+coverage, sequence)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([c] (const Sequence &_) { _.closure (c); })
    ;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+coverage).add_coverage (c->input))) return;
    + hb_zip (this+coverage, sequence)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([c] (const Sequence &_) { _.collect_glyphs (c); })
    ;
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool would_apply (hb_would_apply_context_t *c) const
  { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    return_trace ((this+sequence[index]).apply (c));
  }

  bool serialize (hb_serialize_context_t *c,
                  hb_sorted_array_t<const HBGlyphID> glyphs,
                  hb_array_t<const unsigned int> substitute_len_list,
                  hb_array_t<const HBGlyphID> substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!sequence.serialize (c, glyphs.length))) return_trace (false);
    for (unsigned int i = 0; i < glyphs.length; i++)
    {
      unsigned int substitute_len = substitute_len_list[i];
      if (unlikely (!sequence[i].serialize (c, this)
                                .serialize (c, substitute_glyphs_list.sub_array (0, substitute_len))))
        return_trace (false);
      substitute_glyphs_list += substitute_len;
    }
    return_trace (coverage.serialize (c, this).serialize (c, glyphs));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + hb_zip (this+coverage, sequence)
    | hb_filter (glyphset, hb_first)
    | hb_filter (subset_offset_array (c, out->sequence, this, out), hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;
    out->coverage.serialize (c->serializer, out)
                 .serialize (c->serializer, new_coverage.iter ());
    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && sequence.sanitize (c, this));
  }

  protected:
  HBUINT16      format;             /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;           /* Offset to Coverage table--from
                                     * beginning of Substitution table */
  OffsetArrayOf<Sequence>
                sequence;           /* Array of Sequence tables
                                     * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, sequence);
};

struct MultipleSubst
{
  bool serialize (hb_serialize_context_t *c,
                  hb_sorted_array_t<const HBGlyphID> glyphs,
                  hb_array_t<const unsigned int> substitute_len_list,
                  hb_array_t<const HBGlyphID> substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return_trace (false);
    unsigned int format = 1;
    u.format = format;
    switch (u.format) {
    case 1: return_trace (u.format1.serialize (c, glyphs, substitute_len_list, substitute_glyphs_list));
    default:return_trace (false);
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;     /* Format identifier */
  MultipleSubstFormat1  format1;
  } u;
};

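/*
 * AlternateSet: the alternate GlyphIDs for one covered glyph.  The 1-based
 * alternate index is read from the lookup-mask bits of the current glyph
 * (or randomized for the 'rand' feature).
 */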
struct AlternateSet
{
  bool intersects (const hb_set_t *glyphs) const
  { return hb_any (alternates, glyphs); }

  void closure (hb_closure_context_t *c) const
  { c->output->add_array (alternates.arrayZ, alternates.len); }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  { c->output->add_array (alternates.arrayZ, alternates.len); }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int count = alternates.len;

    if (unlikely (!count)) return_trace (false);

    hb_mask_t glyph_mask = c->buffer->cur().mask;
    hb_mask_t lookup_mask = c->lookup_mask;

    /* Note: This breaks badly if two features enabled this lookup together. */
    unsigned int shift = hb_ctz (lookup_mask);
    unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);

    /* If alt_index is MAX_VALUE, randomize feature if it is the rand feature. */
    if (alt_index == HB_OT_MAP_MAX_VALUE && c->random)
      alt_index = c->random_number () % count + 1;

    if (unlikely (alt_index > count || alt_index == 0)) return_trace (false);

    c->replace_glyph (alternates[alt_index - 1]);

    return_trace (true);
  }

  template <typename Iterator,
            hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c,
                  Iterator alts)
  {
    TRACE_SERIALIZE (this);
    return_trace (alternates.serialize (c, alts));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto it =
    + hb_iter (alternates)
    | hb_filter (glyphset)
    | hb_map (glyph_map)
    ;

    auto *out = c->serializer->start_embed (*this);
    return_trace (out->serialize (c->serializer, it) &&
                  out->alternates);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (alternates.sanitize (c));
  }

  protected:
  ArrayOf<HBGlyphID>
                alternates;         /* Array of alternate GlyphIDs--in
                                     * arbitrary order */
  public:
  DEFINE_SIZE_ARRAY (2, alternates);
};

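/*
 * Alternate Substitution, Format 1: selects the AlternateSet indexed by the
 * Coverage Index of the current glyph.
 */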
struct AlternateSubstFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  void closure (hb_closure_context_t *c) const
  {
    + hb_zip (this+coverage, alternateSet)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([c] (const AlternateSet &_) { _.closure (c); })
    ;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+coverage).add_coverage (c->input))) return;
    + hb_zip (this+coverage, alternateSet)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([c] (const AlternateSet &_) { _.collect_glyphs (c); })
    ;
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool would_apply (hb_would_apply_context_t *c) const
  { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    return_trace ((this+alternateSet[index]).apply (c));
  }

  bool serialize (hb_serialize_context_t *c,
                  hb_sorted_array_t<const HBGlyphID> glyphs,
                  hb_array_t<const unsigned int> alternate_len_list,
                  hb_array_t<const HBGlyphID> alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!alternateSet.serialize (c, glyphs.length))) return_trace (false);
    for (unsigned int i = 0; i < glyphs.length; i++)
    {
      unsigned int alternate_len = alternate_len_list[i];
      if (unlikely (!alternateSet[i].serialize (c, this)
                                    .serialize (c, alternate_glyphs_list.sub_array (0, alternate_len))))
        return_trace (false);
      alternate_glyphs_list += alternate_len;
    }
    return_trace (coverage.serialize (c, this).serialize (c, glyphs));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + hb_zip (this+coverage, alternateSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter (subset_offset_array (c, out->alternateSet, this, out), hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;
    out->coverage.serialize (c->serializer, out)
                 .serialize (c->serializer, new_coverage.iter ());
    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;             /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;           /* Offset to Coverage table--from
                                     * beginning of Substitution table */
  OffsetArrayOf<AlternateSet>
                alternateSet;       /* Array of AlternateSet tables
                                     * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, alternateSet);
};

struct AlternateSubst
{
  bool serialize (hb_serialize_context_t *c,
                  hb_sorted_array_t<const HBGlyphID> glyphs,
                  hb_array_t<const unsigned int> alternate_len_list,
                  hb_array_t<const HBGlyphID> alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return_trace (false);
    unsigned int format = 1;
    u.format = format;
    switch (u.format) {
    case 1: return_trace (u.format1.serialize (c, glyphs, alternate_len_list, alternate_glyphs_list));
    default:return_trace (false);
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;     /* Format identifier */
  AlternateSubstFormat1 format1;
  } u;
};


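/*
 * Ligature: one candidate ligature; matches its component glyph string
 * (stored starting from the second component) and ligates it into ligGlyph.
 */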
struct Ligature
{
  bool intersects (const hb_set_t *glyphs) const
  { return hb_all (component, glyphs); }

  void closure (hb_closure_context_t *c) const
  {
    if (!intersects (c->glyphs)) return;
    c->output->add (ligGlyph);
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    c->input->add_array (component.arrayZ, component.get_length ());
    c->output->add (ligGlyph);
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    if (c->len != component.lenP1)
      return false;

    for (unsigned int i = 1; i < c->len; i++)
      if (likely (c->glyphs[i] != component[i]))
        return false;

    return true;
  }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int count = component.lenP1;

    if (unlikely (!count)) return_trace (false);

    /* Special-case to make it in-place and not consider this
     * as a "ligated" substitution. */
    if (unlikely (count == 1))
    {
      c->replace_glyph (ligGlyph);
      return_trace (true);
    }

    unsigned int total_component_count = 0;

    unsigned int match_length = 0;
    unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];

    if (likely (!match_input (c, count,
                              &component[1],
                              match_glyph,
                              nullptr,
                              &match_length,
                              match_positions,
                              &total_component_count)))
      return_trace (false);

    ligate_input (c,
                  count,
                  match_positions,
                  match_length,
                  ligGlyph,
                  total_component_count);

    return_trace (true);
  }

  template <typename Iterator,
            hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c,
                  hb_codepoint_t ligature,
                  Iterator components /* Starting from second */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    ligGlyph = ligature;
    if (unlikely (!component.serialize (c, components))) return_trace (false);
    return_trace (true);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    if (!intersects (&glyphset) || !glyphset.has (ligGlyph)) return_trace (false);

    auto it =
    + hb_iter (component)
    | hb_map (glyph_map)
    ;

    auto *out = c->serializer->start_embed (*this);
    return_trace (out->serialize (c->serializer,
                                  glyph_map[ligGlyph],
                                  it));
  }

  public:
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (ligGlyph.sanitize (c) && component.sanitize (c));
  }

  protected:
  HBGlyphID     ligGlyph;           /* GlyphID of ligature to substitute */
  HeadlessArrayOf<HBGlyphID>
                component;          /* Array of component GlyphIDs--start
                                     * with the second component--ordered
                                     * in writing direction */
  public:
  DEFINE_SIZE_ARRAY (4, component);
};

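/*
 * LigatureSet: the Ligatures sharing the same first glyph, tried in order
 * until one applies.
 */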
struct LigatureSet
{
  bool intersects (const hb_set_t *glyphs) const
  {
    return
    + hb_iter (ligature)
    | hb_map (hb_add (this))
    | hb_map ([glyphs] (const Ligature &_) { return _.intersects (glyphs); })
    | hb_any
    ;
  }

  void closure (hb_closure_context_t *c) const
  {
    + hb_iter (ligature)
    | hb_map (hb_add (this))
    | hb_apply ([c] (const Ligature &_) { _.closure (c); })
    ;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    + hb_iter (ligature)
    | hb_map (hb_add (this))
    | hb_apply ([c] (const Ligature &_) { _.collect_glyphs (c); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    return
    + hb_iter (ligature)
    | hb_map (hb_add (this))
    | hb_map ([c] (const Ligature &_) { return _.would_apply (c); })
    | hb_any
    ;
  }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int num_ligs = ligature.len;
    for (unsigned int i = 0; i < num_ligs; i++)
    {
      const Ligature &lig = this+ligature[i];
      if (lig.apply (c)) return_trace (true);
    }

    return_trace (false);
  }

  bool serialize (hb_serialize_context_t *c,
                  hb_array_t<const HBGlyphID> ligatures,
                  hb_array_t<const unsigned int> component_count_list,
                  hb_array_t<const HBGlyphID> &component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!ligature.serialize (c, ligatures.length))) return_trace (false);
    for (unsigned int i = 0; i < ligatures.length; i++)
    {
      unsigned int component_count = (unsigned) hb_max ((int) component_count_list[i] - 1, 0);
      if (unlikely (!ligature[i].serialize (c, this)
                                .serialize (c,
                                            ligatures[i],
                                            component_list.sub_array (0, component_count))))
        return_trace (false);
      component_list += component_count;
    }
    return_trace (true);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    + hb_iter (ligature)
    | hb_filter (subset_offset_array (c, out->ligature, this, out))
    | hb_drain
    ;
    return_trace (bool (out->ligature));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (ligature.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<Ligature>
                ligature;           /* Array of Ligature tables
                                     * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, ligature);
};

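/*
 * Ligature Substitution, Format 1: maps the first (covered) glyph of a
 * ligature to its LigatureSet via the Coverage Index.
 */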
struct LigatureSubstFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  {
    return
    + hb_zip (this+coverage, ligatureSet)
    | hb_filter (*glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map ([this, glyphs] (const OffsetTo<LigatureSet> &_)
              { return (this+_).intersects (glyphs); })
    | hb_any
    ;
  }

  void closure (hb_closure_context_t *c) const
  {
    + hb_zip (this+coverage, ligatureSet)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([c] (const LigatureSet &_) { _.closure (c); })
    ;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+coverage).add_coverage (c->input))) return;

    + hb_zip (this+coverage, ligatureSet)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([c] (const LigatureSet &_) { _.collect_glyphs (c); })
    ;
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
    if (likely (index == NOT_COVERED)) return false;

    const LigatureSet &lig_set = this+ligatureSet[index];
    return lig_set.would_apply (c);
  }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LigatureSet &lig_set = this+ligatureSet[index];
    return_trace (lig_set.apply (c));
  }

  bool serialize (hb_serialize_context_t *c,
                  hb_sorted_array_t<const HBGlyphID> first_glyphs,
                  hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
                  hb_array_t<const HBGlyphID> ligatures_list,
                  hb_array_t<const unsigned int> component_count_list,
                  hb_array_t<const HBGlyphID> component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!ligatureSet.serialize (c, first_glyphs.length))) return_trace (false);
    for (unsigned int i = 0; i < first_glyphs.length; i++)
    {
      unsigned int ligature_count = ligature_per_first_glyph_count_list[i];
      if (unlikely (!ligatureSet[i].serialize (c, this)
                                   .serialize (c,
                                               ligatures_list.sub_array (0, ligature_count),
                                               component_count_list.sub_array (0, ligature_count),
                                               component_list))) return_trace (false);
      ligatures_list += ligature_count;
      component_count_list += ligature_count;
    }
    return_trace (coverage.serialize (c, this).serialize (c, first_glyphs));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + hb_zip (this+coverage, ligatureSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter (subset_offset_array (c, out->ligatureSet, this, out), hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;
    out->coverage.serialize (c->serializer, out)
                 .serialize (c->serializer, new_coverage.iter ());
    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;             /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;           /* Offset to Coverage table--from
                                     * beginning of Substitution table */
  OffsetArrayOf<LigatureSet>
                ligatureSet;        /* Array of LigatureSet tables
                                     * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ligatureSet);
};

struct LigatureSubst
{
  bool serialize (hb_serialize_context_t *c,
                  hb_sorted_array_t<const HBGlyphID> first_glyphs,
                  hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
                  hb_array_t<const HBGlyphID> ligatures_list,
                  hb_array_t<const unsigned int> component_count_list,
                  hb_array_t<const HBGlyphID> component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (u.format))) return_trace (false);
    unsigned int format = 1;
    u.format = format;
    switch (u.format) {
    case 1: return_trace (u.format1.serialize (c,
                                               first_glyphs,
                                               ligature_per_first_glyph_count_list,
                                               ligatures_list,
                                               component_count_list,
                                               component_list));
    default:return_trace (false);
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;     /* Format identifier */
  LigatureSubstFormat1  format1;
  } u;
};


struct ContextSubst : Context {};

struct ChainContextSubst : ChainContext {};

struct ExtensionSubst : Extension<ExtensionSubst>
{
  typedef struct SubstLookupSubTable SubTable;

  bool is_reverse () const;
};


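/*
 * Reverse Chaining Contextual Single Substitution, Format 1: substitutes a
 * single glyph after matching backtrack and lookahead coverages; applied in
 * reverse (last-to-first) order and never reached through lookup chaining.
 */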
struct ReverseChainSingleSubstFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverage).intersects (glyphs))
      return false;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);

    unsigned int count;

    count = backtrack.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+backtrack[i]).intersects (glyphs))
        return false;

    count = lookahead.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+lookahead[i]).intersects (glyphs))
        return false;

    return true;
  }

  void closure (hb_closure_context_t *c) const
  {
    if (!intersects (c->glyphs)) return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
    const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);

    + hb_zip (this+coverage, substitute)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_sink (c->output)
    ;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+coverage).add_coverage (c->input))) return;

    unsigned int count;

    count = backtrack.len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!(this+backtrack[i]).add_coverage (c->before))) return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
    count = lookahead.len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!(this+lookahead[i]).add_coverage (c->after))) return;

    const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
    count = substitute.len;
    c->output->add_array (substitute.arrayZ, substitute.len);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool would_apply (hb_would_apply_context_t *c) const
  { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
      return_trace (false); /* No chaining to this type */

    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
    const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);

    unsigned int start_index = 0, end_index = 0;
    if (match_backtrack (c,
                         backtrack.len, (HBUINT16 *) backtrack.arrayZ,
                         match_coverage, this,
                         &start_index) &&
        match_lookahead (c,
                         lookahead.len, (HBUINT16 *) lookahead.arrayZ,
                         match_coverage, this,
                         1, &end_index))
    {
      c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
      c->replace_glyph_inplace (substitute[index]);
      /* Note: We DON'T decrease buffer->idx. The main loop does it
       * for us. This is useful for preventing surprises if someone
       * calls us through a Context lookup. */
      return_trace (true);
    }

    return_trace (false);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
      return_trace (false);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
    if (!lookahead.sanitize (c, this))
      return_trace (false);
    const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
    return_trace (substitute.sanitize (c));
  }

  protected:
  HBUINT16      format;             /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;           /* Offset to Coverage table--from
                                     * beginning of table */
  OffsetArrayOf<Coverage>
                backtrack;          /* Array of coverage tables
                                     * in backtracking sequence, in glyph
                                     * sequence order */
  OffsetArrayOf<Coverage>
                lookaheadX;         /* Array of coverage tables
                                     * in lookahead sequence, in glyph
                                     * sequence order */
  ArrayOf<HBGlyphID>
                substituteX;        /* Array of substitute
                                     * GlyphIDs--ordered by Coverage Index */
  public:
  DEFINE_SIZE_MIN (10);
};

struct ReverseChainSingleSubst
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16                        format;   /* Format identifier */
  ReverseChainSingleSubstFormat1  format1;
  } u;
};



/*
 * SubstLookup
 */

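/*
 * SubstLookupSubTable: union of all GSUB subtable types; dispatch () switches
 * on the enclosing lookup's lookup_type.
 */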
struct SubstLookupSubTable
{
  friend struct Lookup;
  friend struct SubstLookup;

  enum Type {
    Single = 1,
    Multiple = 2,
    Alternate = 3,
    Ligature = 4,
    Context = 5,
    ChainContext = 6,
    Extension = 7,
    ReverseChainSingle = 8
  };

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, lookup_type);
    switch (lookup_type) {
    case Single: return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
    case Multiple: return_trace (u.multiple.dispatch (c, hb_forward<Ts> (ds)...));
    case Alternate: return_trace (u.alternate.dispatch (c, hb_forward<Ts> (ds)...));
    case Ligature: return_trace (u.ligature.dispatch (c, hb_forward<Ts> (ds)...));
    case Context: return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
    case ChainContext: return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
    case Extension: return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
    case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c, hb_forward<Ts> (ds)...));
    default: return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  SingleSubst             single;
  MultipleSubst           multiple;
  AlternateSubst          alternate;
  LigatureSubst           ligature;
  ContextSubst            context;
  ChainContextSubst       chainContext;
  ExtensionSubst          extension;
  ReverseChainSingleSubst reverseChainContextSingle;
  } u;
  public:
  DEFINE_SIZE_MIN (0);
};


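/*
 * SubstLookup: a GSUB lookup.  Wraps Lookup with substitution-specific
 * dispatch, per-type serialize_* helpers, and the recursion hooks used by
 * contextual lookups.
 */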
struct SubstLookup : Lookup
{
  typedef SubstLookupSubTable SubTable;

  const SubTable& get_subtable (unsigned int i) const
  { return Lookup::get_subtable<SubTable> (i); }

  HB_INTERNAL static bool lookup_type_is_reverse (unsigned int lookup_type)
  { return lookup_type == SubTable::ReverseChainSingle; }

  bool is_reverse () const
  {
    unsigned int type = get_type ();
    if (unlikely (type == SubTable::Extension))
      return CastR<ExtensionSubst> (get_subtable (0)).is_reverse ();
    return lookup_type_is_reverse (type);
  }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    return_trace (dispatch (c));
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    hb_intersects_context_t c (glyphs);
    return dispatch (&c);
  }

  hb_closure_context_t::return_t closure (hb_closure_context_t *c, unsigned int this_index) const
  {
    if (!c->should_visit_lookup (this_index))
      return hb_closure_context_t::default_return_value ();

    c->set_recurse_func (dispatch_closure_recurse_func);

    hb_closure_context_t::return_t ret = dispatch (c);

    c->flush ();

    return ret;
  }

  hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    c->set_recurse_func (dispatch_recurse_func<hb_collect_glyphs_context_t>);
    return dispatch (c);
  }

  template <typename set_t>
  void add_coverage (set_t *glyphs) const
  {
    hb_add_coverage_context_t<set_t> c (glyphs);
    dispatch (&c);
  }

  bool would_apply (hb_would_apply_context_t *c,
                    const hb_ot_layout_lookup_accelerator_t *accel) const
  {
    if (unlikely (!c->len)) return false;
    if (!accel->may_have (c->glyphs[0])) return false;
    return dispatch (c);
  }

  HB_INTERNAL static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);

  SubTable& serialize_subtable (hb_serialize_context_t *c,
                                unsigned int i)
  { return get_subtables<SubTable> ()[i].serialize (c, this); }

  bool serialize_single (hb_serialize_context_t *c,
                         uint32_t lookup_props,
                         hb_sorted_array_t<const HBGlyphID> glyphs,
                         hb_array_t<const HBGlyphID> substitutes)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubTable::Single, lookup_props, 1))) return_trace (false);
    return_trace (serialize_subtable (c, 0).u.single.
                  serialize (c, hb_zip (glyphs, substitutes)));
  }

  bool serialize_multiple (hb_serialize_context_t *c,
                           uint32_t lookup_props,
                           hb_sorted_array_t<const HBGlyphID> glyphs,
                           hb_array_t<const unsigned int> substitute_len_list,
                           hb_array_t<const HBGlyphID> substitute_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubTable::Multiple, lookup_props, 1))) return_trace (false);
    return_trace (serialize_subtable (c, 0).u.multiple.
                  serialize (c,
                             glyphs,
                             substitute_len_list,
                             substitute_glyphs_list));
  }

  bool serialize_alternate (hb_serialize_context_t *c,
                            uint32_t lookup_props,
                            hb_sorted_array_t<const HBGlyphID> glyphs,
                            hb_array_t<const unsigned int> alternate_len_list,
                            hb_array_t<const HBGlyphID> alternate_glyphs_list)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubTable::Alternate, lookup_props, 1))) return_trace (false);
    return_trace (serialize_subtable (c, 0).u.alternate.
                  serialize (c,
                             glyphs,
                             alternate_len_list,
                             alternate_glyphs_list));
  }

  bool serialize_ligature (hb_serialize_context_t *c,
                           uint32_t lookup_props,
                           hb_sorted_array_t<const HBGlyphID> first_glyphs,
                           hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
                           hb_array_t<const HBGlyphID> ligatures_list,
                           hb_array_t<const unsigned int> component_count_list,
                           hb_array_t<const HBGlyphID> component_list /* Starting from second for each ligature */)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!Lookup::serialize (c, SubTable::Ligature, lookup_props, 1))) return_trace (false);
    return_trace (serialize_subtable (c, 0).u.ligature.
                  serialize (c,
                             first_glyphs,
                             ligature_per_first_glyph_count_list,
                             ligatures_list,
                             component_count_list,
                             component_list));
  }

  template <typename context_t>
  HB_INTERNAL static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

  HB_INTERNAL static hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
  {
    if (!c->should_visit_lookup (lookup_index))
      return hb_empty_t ();

    hb_closure_context_t::return_t ret = dispatch_recurse_func (c, lookup_index);

    /* While in theory we should flush here, it will cause timeouts because a recursive
     * lookup can keep growing the glyph set. Skip, and outer loop will retry up to
     * HB_CLOSURE_MAX_STAGES time, which should be enough for every realistic font. */
    //c->flush ();

    return ret;
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  { return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }

  bool subset (hb_subset_context_t *c) const
  { return Lookup::subset<SubTable> (c); }

  bool sanitize (hb_sanitize_context_t *c) const
  { return Lookup::sanitize<SubTable> (c); }
};

/*
 * GSUB -- Glyph Substitution
 * https://docs.microsoft.com/en-us/typography/opentype/spec/gsub
 */

struct GSUB : GSUBGPOS
{
  static constexpr hb_tag_t tableTag = HB_OT_TAG_GSUB;

  const SubstLookup& get_lookup (unsigned int i) const
  { return CastR<SubstLookup> (GSUBGPOS::get_lookup (i)); }

  bool subset (hb_subset_context_t *c) const
  { return GSUBGPOS::subset<SubstLookup> (c); }

  bool sanitize (hb_sanitize_context_t *c) const
  { return GSUBGPOS::sanitize<SubstLookup> (c); }

  HB_INTERNAL bool is_blacklisted (hb_blob_t *blob,
                                   hb_face_t *face) const;

  typedef GSUBGPOS::accelerator_t<GSUB> accelerator_t;
};


struct GSUB_accelerator_t : GSUB::accelerator_t {};


/* Out-of-class implementation for methods recursing */

#ifndef HB_NO_OT_LAYOUT
/*static*/ inline bool ExtensionSubst::is_reverse () const
{
  unsigned int type = get_type ();
  if (unlikely (type == SubTable::Extension))
    return CastR<ExtensionSubst> (get_subtable<SubTable> ()).is_reverse ();
  return SubstLookup::lookup_type_is_reverse (type);
}
template <typename context_t>
/*static*/ inline typename context_t::return_t SubstLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
  const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (lookup_index);
  return l.dispatch (c);
}
/*static*/ inline bool SubstLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
{
  const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (lookup_index);
  unsigned int saved_lookup_props = c->lookup_props;
  unsigned int saved_lookup_index = c->lookup_index;
  c->set_lookup_index (lookup_index);
  c->set_lookup_props (l.get_props ());
  bool ret = l.dispatch (c);
  c->set_lookup_index (saved_lookup_index);
  c->set_lookup_props (saved_lookup_props);
  return ret;
}
#endif


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUB_TABLE_HH */