/*
 * Copyright © 2007,2008,2009 Red Hat, Inc.
 * Copyright © 2010,2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_COMMON_HH
#define HB_OT_LAYOUT_COMMON_HH

#include "hb.hh"
#include "hb-ot-layout.hh"
#include "hb-open-type.hh"
#include "hb-set.hh"
#include "hb-bimap.hh"


#ifndef HB_MAX_NESTING_LEVEL
#define HB_MAX_NESTING_LEVEL 6
#endif
#ifndef HB_MAX_CONTEXT_LENGTH
#define HB_MAX_CONTEXT_LENGTH 64
#endif
#ifndef HB_CLOSURE_MAX_STAGES
/*
 * The maximum number of times a lookup can be applied during shaping.
 * Used to limit the number of iterations of the closure algorithm.
 * This must be larger than the number of times add_pause() is
 * called in a collect_features call of any shaper.
 */
#define HB_CLOSURE_MAX_STAGES 32
#endif

#ifndef HB_MAX_SCRIPTS
#define HB_MAX_SCRIPTS 500
#endif

#ifndef HB_MAX_LANGSYS
#define HB_MAX_LANGSYS 2000
#endif

#ifndef HB_MAX_FEATURES
#define HB_MAX_FEATURES 750
#endif

#ifndef HB_MAX_FEATURE_INDICES
#define HB_MAX_FEATURE_INDICES 1500
#endif

#ifndef HB_MAX_LOOKUP_INDICES
#define HB_MAX_LOOKUP_INDICES 20000
#endif
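
/* All of the limits above are guarded by #ifndef, so each can be overridden
 * at build time, e.g. by passing -DHB_MAX_NESTING_LEVEL=8 on the compiler
 * command line. */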

namespace OT {


#define NOT_COVERED ((unsigned int) -1)


template<typename Iterator>
static inline void Coverage_serialize (hb_serialize_context_t *c,
				       Iterator it);

template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
				       Iterator it);

static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
					  const hb_set_t &glyphset,
					  const hb_map_t &gid_klass_map,
					  hb_sorted_vector_t<HBGlyphID> &glyphs,
					  const hb_set_t &klasses,
					  hb_map_t *klass_map /*INOUT*/);

struct hb_subset_layout_context_t :
  hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
{
  const char *get_name () { return "SUBSET_LAYOUT"; }
  static return_t default_return_value () { return hb_empty_t (); }

  bool visitScript ()
  {
    return script_count++ < HB_MAX_SCRIPTS;
  }

  bool visitLangSys ()
  {
    return langsys_count++ < HB_MAX_LANGSYS;
  }

  bool visitFeatureIndex (int count)
  {
    feature_index_count += count;
    return feature_index_count < HB_MAX_FEATURE_INDICES;
  }

  bool visitLookupIndex ()
  {
    lookup_index_count++;
    return lookup_index_count < HB_MAX_LOOKUP_INDICES;
  }

  hb_subset_context_t *subset_context;
  const hb_tag_t table_tag;
  const hb_map_t *lookup_index_map;
  const hb_map_t *feature_index_map;

  hb_subset_layout_context_t (hb_subset_context_t *c_,
			      hb_tag_t tag_,
			      hb_map_t *lookup_map_,
			      hb_map_t *feature_map_) :
			      subset_context (c_),
			      table_tag (tag_),
			      lookup_index_map (lookup_map_),
			      feature_index_map (feature_map_),
			      script_count (0),
			      langsys_count (0),
			      feature_index_count (0),
			      lookup_index_count (0)
  {}

  private:
  unsigned script_count;
  unsigned langsys_count;
  unsigned feature_index_count;
  unsigned lookup_index_count;
};
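
/* A subset traversal bails out as soon as one of the visit* methods above
 * returns false; e.g. Script::subset below starts with
 * `if (!l->visitScript ()) return_trace (false);`. */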

struct hb_collect_variation_indices_context_t :
  hb_dispatch_context_t<hb_collect_variation_indices_context_t>
{
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_set_t *layout_variation_indices;
  const hb_set_t *glyph_set;
  const hb_map_t *gpos_lookups;

  hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
					  const hb_set_t *glyph_set_,
					  const hb_map_t *gpos_lookups_) :
					  layout_variation_indices (layout_variation_indices_),
					  glyph_set (glyph_set_),
					  gpos_lookups (gpos_lookups_) {}
};

template<typename OutputArray>
struct subset_offset_array_t
{
  subset_offset_array_t (hb_subset_context_t *subset_context_,
			 OutputArray& out_,
			 const void *base_) : subset_context (subset_context_),
					      out (out_), base (base_) {}

  template <typename T>
  bool operator () (T&& offset)
  {
    auto *o = out.serialize_append (subset_context->serializer);
    if (unlikely (!o)) return false;
    auto snap = subset_context->serializer->snapshot ();
    bool ret = o->serialize_subset (subset_context, offset, base);
    if (!ret)
    {
      out.pop ();
      subset_context->serializer->revert (snap);
    }
    return ret;
  }

  private:
  hb_subset_context_t *subset_context;
  OutputArray &out;
  const void *base;
};


template<typename OutputArray, typename Arg>
struct subset_offset_array_arg_t
{
  subset_offset_array_arg_t (hb_subset_context_t *subset_context_,
			     OutputArray& out_,
			     const void *base_,
			     Arg &&arg_) : subset_context (subset_context_), out (out_),
					   base (base_), arg (arg_) {}

  template <typename T>
  bool operator () (T&& offset)
  {
    auto *o = out.serialize_append (subset_context->serializer);
    if (unlikely (!o)) return false;
    auto snap = subset_context->serializer->snapshot ();
    bool ret = o->serialize_subset (subset_context, offset, base, arg);
    if (!ret)
    {
      out.pop ();
      subset_context->serializer->revert (snap);
    }
    return ret;
  }

  private:
  hb_subset_context_t *subset_context;
  OutputArray &out;
  const void *base;
  Arg &&arg;
};

/*
 * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
 * and discards the offset in the array if the subset operation results in an empty
 * thing.
 */
struct
{
  template<typename OutputArray>
  subset_offset_array_t<OutputArray>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
	       const void *base) const
  { return subset_offset_array_t<OutputArray> (subset_context, out, base); }

  /* Variant with one extra argument passed to serialize_subset */
  template<typename OutputArray, typename Arg>
  subset_offset_array_arg_t<OutputArray, Arg>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
	       const void *base, Arg &&arg) const
  { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
}
HB_FUNCOBJ (subset_offset_array);
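
/* Typical use is inside an iterator pipeline; e.g. Lookup::subset below
 * applies it to the subtable offsets that survive subsetting:
 *
 *   + hb_iter (get_subtables <TSubTable> ())
 *   | hb_filter (...)
 *   | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
 *   ;
 */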

template<typename OutputArray>
struct subset_record_array_t
{
  subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_,
			 const void *base_) : subset_layout_context (c_),
					      out (out_), base (base_) {}

  template <typename T>
  void
  operator () (T&& record)
  {
    auto snap = subset_layout_context->subset_context->serializer->snapshot ();
    bool ret = record.subset (subset_layout_context, base);
    if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
    else out->len++;
  }

  private:
  hb_subset_layout_context_t *subset_layout_context;
  OutputArray *out;
  const void *base;
};

/*
 * Helper to subset a RecordList/record array. Subsets each Record in the array and
 * discards the record if the subset operation returns false.
 */
struct
{
  template<typename OutputArray>
  subset_record_array_t<OutputArray>
  operator () (hb_subset_layout_context_t *c, OutputArray* out,
	       const void *base) const
  { return subset_record_array_t<OutputArray> (c, out, base); }
}
HB_FUNCOBJ (subset_record_array);
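
/* Used the same way; e.g. RecordListOf::subset below runs
 *
 *   + this->iter ()
 *   | hb_apply (subset_record_array (l, out, this))
 *   ;
 *
 * so only the records that subset successfully are kept. */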

/*
 *
 * OpenType Layout Common Table Formats
 *
 */


/*
 * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
 */

struct Record_sanitize_closure_t {
  hb_tag_t tag;
  const void *list_base;
};

template <typename Type>
struct Record
{
  int cmp (hb_tag_t a) const { return tag.cmp (a); }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);
    bool ret = out->offset.serialize_subset (c->subset_context, offset, base, c, &tag);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    const Record_sanitize_closure_t closure = {tag, base};
    return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
  }

  Tag           tag;            /* 4-byte Tag identifier */
  OffsetTo<Type>
                offset;         /* Offset from beginning of object holding
                                 * the Record */
  public:
  DEFINE_SIZE_STATIC (6);
};

template <typename Type>
struct RecordArrayOf : SortedArrayOf<Record<Type>>
{
  const OffsetTo<Type>& get_offset (unsigned int i) const
  { return (*this)[i].offset; }
  OffsetTo<Type>& get_offset (unsigned int i)
  { return (*this)[i].offset; }
  const Tag& get_tag (unsigned int i) const
  { return (*this)[i].tag; }
  unsigned int get_tags (unsigned int start_offset,
			 unsigned int *record_count /* IN/OUT */,
			 hb_tag_t *record_tags /* OUT */) const
  {
    if (record_count)
    {
      + this->sub_array (start_offset, record_count)
      | hb_map (&Record<Type>::tag)
      | hb_sink (hb_array (record_tags, *record_count))
      ;
    }
    return this->len;
  }
  bool find_index (hb_tag_t tag, unsigned int *index) const
  {
    return this->bfind (tag, index, HB_BFIND_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
  }
};

template <typename Type>
struct RecordListOf : RecordArrayOf<Type>
{
  const Type& operator [] (unsigned int i) const
  { return this+this->get_offset (i); }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    + this->iter ()
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (RecordArrayOf<Type>::sanitize (c, this));
  }
};

struct Feature;

struct RecordListOfFeature : RecordListOf<Feature>
{
  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    + hb_zip (*this, hb_range (count))
    | hb_filter (l->feature_index_map, hb_second)
    | hb_map (hb_first)
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }
};

struct RangeRecord
{
  int cmp (hb_codepoint_t g) const
  { return g < first ? -1 : g <= last ? 0 : +1; }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  bool intersects (const hb_set_t *glyphs) const
  { return glyphs->intersects (first, last); }

  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  { return glyphs->add_range (first, last); }

  HBGlyphID     first;          /* First GlyphID in the range */
  HBGlyphID     last;           /* Last GlyphID in the range */
  HBUINT16      value;          /* Value: the Coverage index of the first
                                 * glyph when used in a Coverage table, or
                                 * the class value when used in a ClassDef */
  public:
  DEFINE_SIZE_STATIC (6);
};
DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord);


struct IndexArray : ArrayOf<Index>
{
  bool intersects (const hb_map_t *indexes) const
  { return hb_any (*this, indexes); }

  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
		  hb_subset_layout_context_t *l,
		  Iterator it)
  {
    if (!it) return;
    if (unlikely (!c->extend_min ((*this)))) return;

    for (const auto _ : it)
    {
      if (!l->visitLookupIndex ()) break;

      Index i;
      i = _;
      c->copy (i);
      this->len++;
    }
  }

  unsigned int get_indexes (unsigned int start_offset,
			    unsigned int *_count /* IN/OUT */,
			    unsigned int *_indexes /* OUT */) const
  {
    if (_count)
    {
      + this->sub_array (start_offset, _count)
      | hb_sink (hb_array (_indexes, *_count))
      ;
    }
    return this->len;
  }

  void add_indexes_to (hb_set_t* output /* OUT */) const
  {
    output->add_array (arrayZ, len);
  }
};


struct LangSys
{
  unsigned int get_feature_count () const
  { return featureIndex.len; }
  unsigned int get_feature_index (unsigned int i) const
  { return featureIndex[i]; }
  unsigned int get_feature_indexes (unsigned int start_offset,
				    unsigned int *feature_count /* IN/OUT */,
				    unsigned int *feature_indexes /* OUT */) const
  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
  void add_feature_indexes_to (hb_set_t *feature_indexes) const
  { featureIndex.add_indexes_to (feature_indexes); }

  bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
  unsigned int get_required_feature_index () const
  {
    if (reqFeatureIndex == 0xFFFFu)
      return Index::NOT_FOUND_INDEX;
    return reqFeatureIndex;
  }

  LangSys* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  bool operator == (const LangSys& o) const
  {
    if (featureIndex.len != o.featureIndex.len ||
	reqFeatureIndex != o.reqFeatureIndex)
      return false;

    for (const auto _ : + hb_zip (featureIndex, o.featureIndex))
      if (_.first != _.second) return false;

    return true;
  }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l,
	       const Tag *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex)
			   ? l->feature_index_map->get (reqFeatureIndex)
			   : 0xFFFFu;

    if (!l->visitFeatureIndex (featureIndex.len))
      return_trace (false);

    auto it =
    + hb_iter (featureIndex)
    | hb_filter (l->feature_index_map)
    | hb_map (l->feature_index_map)
    ;

    bool ret = bool (it);
    out->featureIndex.serialize (c->serializer, l, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && featureIndex.sanitize (c));
  }

  Offset16      lookupOrderZ;   /* = Null (reserved for an offset to a
                                 * reordering table) */
  HBUINT16      reqFeatureIndex;/* Index of a feature required for this
                                 * language system--if no required features
                                 * = 0xFFFFu */
  IndexArray    featureIndex;   /* Array of indices into the FeatureList */
  public:
  DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
};
DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);

struct Script
{
  unsigned int get_lang_sys_count () const
  { return langSys.len; }
  const Tag& get_lang_sys_tag (unsigned int i) const
  { return langSys.get_tag (i); }
  unsigned int get_lang_sys_tags (unsigned int start_offset,
				  unsigned int *lang_sys_count /* IN/OUT */,
				  hb_tag_t *lang_sys_tags /* OUT */) const
  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
  const LangSys& get_lang_sys (unsigned int i) const
  {
    if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
    return this+langSys[i].offset;
  }
  bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
  { return langSys.find_index (tag, index); }

  bool has_default_lang_sys () const { return defaultLangSys != 0; }
  const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l,
	       const Tag *tag) const
  {
    TRACE_SUBSET (this);
    if (!l->visitScript ()) return_trace (false);

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    bool defaultLang = false;
    if (has_default_lang_sys ())
    {
      c->serializer->push ();
      const LangSys& ls = this+defaultLangSys;
      bool ret = ls.subset (c, l);
      if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
      {
	c->serializer->pop_discard ();
	out->defaultLangSys = 0;
      }
      else
      {
	c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
	defaultLang = true;
      }
    }

    + langSys.iter ()
    | hb_filter ([=] (const Record<LangSys>& record) { return l->visitLangSys (); })
    | hb_filter ([&] (const Record<LangSys>& record)
		 {
		   const LangSys& d = this+defaultLangSys;
		   const LangSys& ls = this+record.offset;
		   return !(ls == d);
		 })
    | hb_apply (subset_record_array (l, &(out->langSys), this))
    ;

    return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
  }

  protected:
  OffsetTo<LangSys>
                defaultLangSys; /* Offset to DefaultLangSys table--from
                                 * beginning of Script table--may be Null */
  RecordArrayOf<LangSys>
                langSys;        /* Array of LangSysRecords--listed
                                 * alphabetically by LangSysTag */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, langSys);
};

typedef RecordListOf<Script> ScriptList;


/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
struct FeatureParamsSize
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);

    /* This subtable has some "history", if you will. Some earlier versions of
     * Adobe tools calculated the offset of the FeatureParams subtable from the
     * beginning of the FeatureList table! Now, that is dealt with in the
     * Feature implementation. But we still need to be able to tell junk from
     * real data. Note: We don't check that the nameID actually exists.
     *
     * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
     *
     * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
     * coming out soon, and that the makeotf program will build a font with a
     * 'size' feature that is correct by the specification.
     *
     * The specification for this feature tag is in the "OpenType Layout Tag
     * Registry". You can see a copy of this at:
     * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
     *
     * Here is one set of rules to determine if the 'size' feature is built
     * correctly, or as by the older versions of MakeOTF. You may be able to do
     * better.
     *
     * Assume that the offset to the size feature is according to specification,
     * and make the following value checks. If it fails, assume the size
     * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
     * If this fails, reject the 'size' feature. The older makeOTF's calculated the
     * offset from the beginning of the FeatureList table, rather than from the
     * beginning of the 'size' Feature table.
     *
     * If "design size" == 0:
     *     fails check
     *
     * Else if ("subfamily identifier" == 0 and
     *          "range start" == 0 and
     *          "range end" == 0 and
     *          "menu name ID" == 0)
     *     passes check: this is the format used when there is a design size
     *     specified, but there is no recommended size range.
     *
     * Else if ("design size" < "range start" or
     *          "design size" > "range end" or
     *          "range end" <= "range start" or
     *          "menu name ID" < 256 or
     *          "menu name ID" > 32767 or
     *          menu name ID is not a name ID which is actually in the name table)
     *     fails test
     * Else
     *     passes test.
     */

    if (!designSize)
      return_trace (false);
    else if (subfamilyID == 0 &&
	     subfamilyNameID == 0 &&
	     rangeStart == 0 &&
	     rangeEnd == 0)
      return_trace (true);
    else if (designSize < rangeStart ||
	     designSize > rangeEnd ||
	     subfamilyNameID < 256 ||
	     subfamilyNameID > 32767)
      return_trace (false);
    else
      return_trace (true);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16      designSize;     /* Represents the design size in 720/inch
                                 * units (decipoints). The design size entry
                                 * must be non-zero. When there is a design
                                 * size but no recommended size range, the
                                 * rest of the array will consist of zeros. */
  HBUINT16      subfamilyID;    /* Has no independent meaning, but serves
                                 * as an identifier that associates fonts
                                 * in a subfamily. All fonts which share a
                                 * Preferred or Font Family name and which
                                 * differ only by size range shall have the
                                 * same subfamily value, and no fonts which
                                 * differ in weight or style shall have the
                                 * same subfamily value. If this value is
                                 * zero, the remaining fields in the array
                                 * will be ignored. */
  NameID        subfamilyNameID;/* If the preceding value is non-zero, this
                                 * value must be set in the range 256 - 32767
                                 * (inclusive). It records the value of a
                                 * field in the name table, which must
                                 * contain English-language strings encoded
                                 * in Windows Unicode and Macintosh Roman,
                                 * and may contain additional strings
                                 * localized to other scripts and languages.
                                 * Each of these strings is the name an
                                 * application should use, in combination
                                 * with the family name, to represent the
                                 * subfamily in a menu. Applications will
                                 * choose the appropriate version based on
                                 * their selection criteria. */
  HBUINT16      rangeStart;     /* Small end of the recommended usage range
                                 * (exclusive), stored in 720/inch units
                                 * (decipoints). */
  HBUINT16      rangeEnd;       /* Large end of the recommended usage range
                                 * (inclusive), stored in 720/inch units
                                 * (decipoints). */
  public:
  DEFINE_SIZE_STATIC (10);
};

/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
struct FeatureParamsStylisticSet
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Right now minorVersion is at zero. Which means, any table supports
     * the uiNameID field. */
    return_trace (c->check_struct (this));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16      version;        /* (set to 0): This corresponds to a “minor”
                                 * version number. Additional data may be
                                 * added to the end of this Feature Parameters
                                 * table in the future. */

  NameID        uiNameID;       /* The 'name' table name ID that specifies a
                                 * string (or strings, for multiple languages)
                                 * for a user-interface label for this
                                 * feature. The values of uiLabelNameId and
                                 * sampleTextNameId are expected to be in the
                                 * font-specific name ID range (256-32767),
                                 * though that is not a requirement in this
                                 * Feature Parameters specification. The
                                 * user-interface label for the feature can
                                 * be provided in multiple languages. An
                                 * English string should be included as a
                                 * fallback. The string should be kept to a
                                 * minimal length to fit comfortably with
                                 * different application interfaces. */
  public:
  DEFINE_SIZE_STATIC (4);
};

/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
struct FeatureParamsCharacterVariants
{
  unsigned
  get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
  {
    if (char_count)
    {
      + characters.sub_array (start_offset, char_count)
      | hb_sink (hb_array (chars, *char_count))
      ;
    }
    return characters.len;
  }

  unsigned get_size () const
  { return min_size + characters.len * HBUINT24::static_size; }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  characters.sanitize (c));
  }

  HBUINT16      format;                 /* Format number is set to 0. */
  NameID        featUILableNameID;      /* The ‘name’ table name ID that
                                         * specifies a string (or strings,
                                         * for multiple languages) for a
                                         * user-interface label for this
                                         * feature. (May be NULL.) */
  NameID        featUITooltipTextNameID;/* The ‘name’ table name ID that
                                         * specifies a string (or strings,
                                         * for multiple languages) that an
                                         * application can use for tooltip
                                         * text for this feature. (May be
                                         * nullptr.) */
  NameID        sampleTextNameID;       /* The ‘name’ table name ID that
                                         * specifies sample text that
                                         * illustrates the effect of this
                                         * feature. (May be NULL.) */
  HBUINT16      numNamedParameters;     /* Number of named parameters. (May
                                         * be zero.) */
  NameID        firstParamUILabelNameID;/* The first ‘name’ table name ID
                                         * used to specify strings for
                                         * user-interface labels for the
                                         * feature parameters. (Must be zero
                                         * if numParameters is zero.) */
  ArrayOf<HBUINT24>
                characters;             /* Array of the Unicode Scalar Value
                                         * of the characters for which this
                                         * feature provides glyph variants.
                                         * (May be zero.) */
  public:
  DEFINE_SIZE_ARRAY (14, characters);
};

struct FeatureParams
{
  bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
  {
#ifdef HB_NO_LAYOUT_FEATURE_PARAMS
    return true;
#endif
    TRACE_SANITIZE (this);
    if (tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.sanitize (c));
    return_trace (true);
  }

  bool subset (hb_subset_context_t *c, const Tag* tag) const
  {
    TRACE_SUBSET (this);
    if (!tag) return_trace (false);
    if (*tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.subset (c));
    return_trace (false);
  }

#ifndef HB_NO_LAYOUT_FEATURE_PARAMS
  const FeatureParamsSize& get_size_params (hb_tag_t tag) const
  {
    if (tag == HB_TAG ('s','i','z','e'))
      return u.size;
    return Null (FeatureParamsSize);
  }
  const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return u.stylisticSet;
    return Null (FeatureParamsStylisticSet);
  }
  const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return u.characterVariants;
    return Null (FeatureParamsCharacterVariants);
  }
#endif

  private:
  union {
    FeatureParamsSize                   size;
    FeatureParamsStylisticSet           stylisticSet;
    FeatureParamsCharacterVariants      characterVariants;
  } u;
  public:
  DEFINE_SIZE_MIN (0);
};

struct Feature
{
  unsigned int get_lookup_count () const
  { return lookupIndex.len; }
  unsigned int get_lookup_index (unsigned int i) const
  { return lookupIndex[i]; }
  unsigned int get_lookup_indexes (unsigned int start_index,
				   unsigned int *lookup_count /* IN/OUT */,
				   unsigned int *lookup_tags /* OUT */) const
  { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
  void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
  { lookupIndex.add_indexes_to (lookup_indexes); }

  const FeatureParams &get_feature_params () const
  { return this+featureParams; }

  bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const
  { return lookupIndex.intersects (lookup_indexes); }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l,
	       const Tag *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    bool subset_featureParams = out->featureParams.serialize_subset (c, featureParams, this, tag);

    auto it =
    + hb_iter (lookupIndex)
    | hb_filter (l->lookup_index_map)
    | hb_map (l->lookup_index_map)
    ;

    out->lookupIndex.serialize (c->serializer, l, it);
    return_trace (bool (it) || subset_featureParams
		  || (tag && *tag == HB_TAG ('p', 'r', 'e', 'f')));
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t *closure = nullptr) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
      return_trace (false);

    /* Some earlier versions of Adobe tools calculated the offset of the
     * FeatureParams subtable from the beginning of the FeatureList table!
     *
     * If sanitizing "failed" for the FeatureParams subtable, try it with the
     * alternative location. We would know sanitize "failed" if the old value
     * of the offset was non-zero, but it's zeroed now.
     *
     * Only do this for the 'size' feature, since at the time of the faulty
     * Adobe tools, only the 'size' feature had FeatureParams defined.
     */

    if (likely (featureParams.is_null ()))
      return_trace (true);

    unsigned int orig_offset = featureParams;
    if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
      return_trace (false);

    if (featureParams == 0 && closure &&
	closure->tag == HB_TAG ('s','i','z','e') &&
	closure->list_base && closure->list_base < this)
    {
      unsigned int new_offset_int = orig_offset -
				    (((char *) this) - ((char *) closure->list_base));

      OffsetTo<FeatureParams> new_offset;
      /* Check that it would not overflow. */
      new_offset = new_offset_int;
      if (new_offset == new_offset_int &&
	  c->try_set (&featureParams, new_offset_int) &&
	  !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
	return_trace (false);
    }

    return_trace (true);
  }

  OffsetTo<FeatureParams>
                featureParams;  /* Offset to Feature Parameters table (if one
                                 * has been defined for the feature), relative
                                 * to the beginning of the Feature Table; = Null
                                 * if not required */
  IndexArray    lookupIndex;    /* Array of LookupList indices */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
};

typedef RecordListOf<Feature> FeatureList;


struct LookupFlag : HBUINT16
{
  enum Flags {
    RightToLeft         = 0x0001u,
    IgnoreBaseGlyphs    = 0x0002u,
    IgnoreLigatures     = 0x0004u,
    IgnoreMarks         = 0x0008u,
    IgnoreFlags         = 0x000Eu,
    UseMarkFilteringSet = 0x0010u,
    Reserved            = 0x00E0u,
    MarkAttachmentType  = 0xFF00u
  };
  public:
  DEFINE_SIZE_STATIC (2);
};

} /* namespace OT */
/* This has to be outside the namespace. */
HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
namespace OT {

struct Lookup
{
  unsigned int get_subtable_count () const { return subTable.len; }

  template <typename TSubTable>
  const OffsetArrayOf<TSubTable>& get_subtables () const
  { return reinterpret_cast<const OffsetArrayOf<TSubTable> &> (subTable); }
  template <typename TSubTable>
  OffsetArrayOf<TSubTable>& get_subtables ()
  { return reinterpret_cast<OffsetArrayOf<TSubTable> &> (subTable); }

  template <typename TSubTable>
  const TSubTable& get_subtable (unsigned int i) const
  { return this+get_subtables<TSubTable> ()[i]; }
  template <typename TSubTable>
  TSubTable& get_subtable (unsigned int i)
  { return this+get_subtables<TSubTable> ()[i]; }

  unsigned int get_size () const
  {
    const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
      return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
    return (const char *) &markFilteringSet - (const char *) this;
  }

  unsigned int get_type () const { return lookupType; }

  /* lookup_props is a 32-bit integer where the lower 16 bits are the LookupFlag
   * and the higher 16 bits are the mark-filtering-set index, if the lookup uses one.
   * Not to be confused with glyph_props, which is very similar. */
  uint32_t get_props () const
  {
    unsigned int flag = lookupFlag;
    if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      flag += (markFilteringSet << 16);
    }
    return flag;
  }
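  /* For example, a lookup with UseMarkFilteringSet (0x0010u) set and a
   * mark-filtering-set index of 3 yields
   * lookup_props == (3u << 16) | 0x0010u. */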

  template <typename TSubTable, typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    unsigned int lookup_type = get_type ();
    TRACE_DISPATCH (this, lookup_type);
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, hb_forward<Ts> (ds)...);
      if (c->stop_sublookup_iteration (r))
	return_trace (r);
    }
    return_trace (c->default_return_value ());
  }

  bool serialize (hb_serialize_context_t *c,
		  unsigned int lookup_type,
		  uint32_t lookup_props,
		  unsigned int num_subtables)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    lookupType = lookup_type;
    lookupFlag = lookup_props & 0xFFFFu;
    if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      if (unlikely (!c->extend (*this))) return_trace (false);
      HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      markFilteringSet = lookup_props >> 16;
    }
    return_trace (true);
  }

  template <typename TSubTable>
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
    out->lookupType = lookupType;
    out->lookupFlag = lookupFlag;

    const hb_set_t *glyphset = c->plan->glyphset ();
    unsigned int lookup_type = get_type ();
    + hb_iter (get_subtables <TSubTable> ())
    | hb_filter ([this, glyphset, lookup_type] (const OffsetTo<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
    | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
    ;

    return_trace (true);
  }

  template <typename TSubTable>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);

    unsigned subtables = get_subtable_count ();
    if (unlikely (!c->visit_subtables (subtables))) return_trace (false);

    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      if (!markFilteringSet.sanitize (c)) return_trace (false);
    }

    if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
      return_trace (false);

    if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
    {
      /* The spec says all subtables of an Extension lookup should
       * have the same type, which shall not be the Extension type
       * itself (but we already checked for that).
       * This is especially important if one has a reverse type!
       *
       * We only do this if sanitizer edit_count is zero. Otherwise,
       * some of the subtables might have become insane after they
       * were sanity-checked by the edits of subsequent subtables.
       * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
       */
      unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
      for (unsigned int i = 1; i < subtables; i++)
	if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
	  return_trace (false);
    }
    return_trace (true);
  }

  private:
  HBUINT16      lookupType;     /* Different enumerations for GSUB and GPOS */
  HBUINT16      lookupFlag;     /* Lookup qualifiers */
  ArrayOf<Offset16>
                subTable;       /* Array of SubTables */
/*HBUINT16      markFilteringSetX[HB_VAR_ARRAY];*/
                                /* Index (base 0) into GDEF mark glyph sets
                                 * structure. This field is only present if bit
                                 * UseMarkFilteringSet of lookup flags is set. */
  public:
  DEFINE_SIZE_ARRAY (6, subTable);
};

typedef OffsetListOf<Lookup> LookupList;

template <typename TLookup>
struct LookupOffsetList : OffsetListOf<TLookup>
{
  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    + hb_zip (*this, hb_range (count))
    | hb_filter (l->lookup_index_map, hb_second)
    | hb_map (hb_first)
    | hb_apply (subset_offset_array (c, *out, this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (OffsetListOf<TLookup>::sanitize (c, this));
  }
};


/*
 * Coverage Table
 */

struct CoverageFormat1
{
  friend struct Coverage;

  private:
  unsigned int get_coverage (hb_codepoint_t glyph_id) const
  {
    unsigned int i;
    glyphArray.bfind (glyph_id, &i, HB_BFIND_NOT_FOUND_STORE, NOT_COVERED);
    return i;
  }

  template <typename Iterator,
	    hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
  {
    TRACE_SERIALIZE (this);
    return_trace (glyphArray.serialize (c, glyphs));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (glyphArray.sanitize (c));
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    /* TODO Speed up, using hb_set_next() and bsearch()? */
    unsigned int count = glyphArray.len;
    for (unsigned int i = 0; i < count; i++)
      if (glyphs->has (glyphArray[i]))
	return true;
    return false;
  }
  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
  { return glyphs->has (glyphArray[index]); }

  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  { return glyphs->add_sorted_array (glyphArray.arrayZ, glyphArray.len); }

  public:
  /* Older compilers need this to be public. */
  struct iter_t
  {
    void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; }
    void fini () {}
    bool more () const { return i < c->glyphArray.len; }
    void next () { i++; }
    hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
    bool operator != (const iter_t& o) const
    { return i != o.i || c != o.c; }

    private:
    const struct CoverageFormat1 *c;
    unsigned int i;
  };
  private:

  protected:
  HBUINT16      coverageFormat; /* Format identifier--format = 1 */
  SortedArrayOf<HBGlyphID>
                glyphArray;     /* Array of GlyphIDs--in numerical order */
  public:
  DEFINE_SIZE_ARRAY (4, glyphArray);
};

struct CoverageFormat2
{
  friend struct Coverage;

  private:
  unsigned int get_coverage (hb_codepoint_t glyph_id) const
  {
    const RangeRecord &range = rangeRecord.bsearch (glyph_id);
    return likely (range.first <= range.last)
	   ? (unsigned int) range.value + (glyph_id - range.first)
	   : NOT_COVERED;
  }

  template <typename Iterator,
	    hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);

    if (unlikely (!glyphs))
    {
      rangeRecord.len = 0;
      return_trace (true);
    }

    /* TODO(iter) Write more efficiently? */
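    /* Two passes over the glyphs: the first counts the ranges needed,
     * the second fills them in, recording for each range the coverage
     * index of its first glyph. */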
    unsigned num_ranges = 0;
    hb_codepoint_t last = (hb_codepoint_t) -2;
    for (auto g: glyphs)
    {
      if (last + 1 != g)
	num_ranges++;
      last = g;
    }

    if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);

    unsigned count = 0;
    unsigned range = (unsigned) -1;
    last = (hb_codepoint_t) -2;
    for (auto g: glyphs)
    {
      if (last + 1 != g)
      {
	range++;
	rangeRecord[range].first = g;
	rangeRecord[range].value = count;
      }
      rangeRecord[range].last = g;
      last = g;
      count++;
    }

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rangeRecord.sanitize (c));
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    /* TODO Speed up, using hb_set_next() and bsearch()? */
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
      if (rangeRecord[i].intersects (glyphs))
	return true;
    return false;
  }
  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
  {
    unsigned int i;
    unsigned int count = rangeRecord.len;
    for (i = 0; i < count; i++) {
      const RangeRecord &range = rangeRecord[i];
      if (range.value <= index &&
	  index < (unsigned int) range.value + (range.last - range.first) &&
	  range.intersects (glyphs))
	return true;
      else if (index < range.value)
	return false;
    }
    return false;
  }

  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
	return false;
    return true;
  }

  public:
  /* Older compilers need this to be public. */
  struct iter_t
  {
    void init (const CoverageFormat2 &c_)
    {
      c = &c_;
      coverage = 0;
      i = 0;
      j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
      if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
      {
	/* Broken table. Skip. */
	i = c->rangeRecord.len;
      }
    }
    void fini () {}
    bool more () const { return i < c->rangeRecord.len; }
    void next ()
    {
      if (j >= c->rangeRecord[i].last)
      {
	i++;
	if (more ())
	{
	  unsigned int old = coverage;
	  j = c->rangeRecord[i].first;
	  coverage = c->rangeRecord[i].value;
	  if (unlikely (coverage != old + 1))
	  {
	    /* Broken table. Skip. Important to avoid DoS.
	     * Also, our callers depend on coverage being
	     * consecutive and monotonically increasing,
	     * ie. iota(). */
	    i = c->rangeRecord.len;
	    return;
	  }
	}
	return;
      }
      coverage++;
      j++;
    }
    hb_codepoint_t get_glyph () const { return j; }
    bool operator != (const iter_t& o) const
    { return i != o.i || j != o.j || c != o.c; }

    private:
    const struct CoverageFormat2 *c;
    unsigned int i, coverage;
    hb_codepoint_t j;
  };
  private:

  protected:
  HBUINT16      coverageFormat; /* Format identifier--format = 2 */
  SortedArrayOf<RangeRecord>
                rangeRecord;    /* Array of glyph ranges--ordered by
                                 * Start GlyphID. rangeCount entries
                                 * long */
  public:
  DEFINE_SIZE_ARRAY (4, rangeRecord);
};

struct Coverage
{
  /* Has interface. */
  static constexpr unsigned SENTINEL = NOT_COVERED;
  typedef unsigned int value_t;
  value_t operator [] (hb_codepoint_t k) const { return get (k); }
  bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
  /* Predicate. */
  bool operator () (hb_codepoint_t k) const { return has (k); }

  unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
  unsigned int get_coverage (hb_codepoint_t glyph_id) const
  {
    switch (u.format) {
    case 1: return u.format1.get_coverage (glyph_id);
    case 2: return u.format2.get_coverage (glyph_id);
    default:return NOT_COVERED;
    }
  }

  template <typename Iterator,
	    hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);

    unsigned count = 0;
    unsigned num_ranges = 0;
    hb_codepoint_t last = (hb_codepoint_t) -2;
    for (auto g: glyphs)
    {
      if (last + 1 != g)
	num_ranges++;
      last = g;
      count++;
    }
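    /* Heuristic: format 1 stores one HBGlyphID (2 bytes) per glyph, while
     * format 2 stores one RangeRecord (6 bytes) per range; so format 1 is
     * no larger whenever count * 2 <= num_ranges * 6, i.e.
     * count <= num_ranges * 3. */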
    u.format = count <= num_ranges * 3 ? 1 : 2;

    switch (u.format)
    {
    case 1: return_trace (u.format1.serialize (c, glyphs));
    case 2: return_trace (u.format2.serialize (c, glyphs));
    default:return_trace (false);
    }
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto it =
    + iter ()
    | hb_filter (glyphset)
    | hb_map_retains_sorting (glyph_map)
    ;

    bool ret = bool (it);
    Coverage_serialize (c->serializer, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format)
    {
    case 1: return_trace (u.format1.sanitize (c));
    case 2: return_trace (u.format2.sanitize (c));
    default:return_trace (true);
    }
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    switch (u.format)
    {
    case 1: return u.format1.intersects (glyphs);
    case 2: return u.format2.intersects (glyphs);
    default:return false;
    }
  }
  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
  {
    switch (u.format)
    {
    case 1: return u.format1.intersects_coverage (glyphs, index);
    case 2: return u.format2.intersects_coverage (glyphs, index);
    default:return false;
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    switch (u.format)
    {
    case 1: return u.format1.collect_coverage (glyphs);
    case 2: return u.format2.collect_coverage (glyphs);
    default:return false;
    }
  }

  struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
  {
    static constexpr bool is_sorted_iterator = true;
    iter_t (const Coverage &c_ = Null (Coverage))
    {
      memset (this, 0, sizeof (*this));
      format = c_.u.format;
      switch (format)
      {
      case 1: u.format1.init (c_.u.format1); return;
      case 2: u.format2.init (c_.u.format2); return;
      default: return;
      }
    }
    bool __more__ () const
    {
      switch (format)
      {
      case 1: return u.format1.more ();
      case 2: return u.format2.more ();
      default:return false;
      }
    }
    void __next__ ()
    {
      switch (format)
      {
      case 1: u.format1.next (); break;
      case 2: u.format2.next (); break;
      default: break;
      }
    }
    typedef hb_codepoint_t __item_t__;
    __item_t__ __item__ () const { return get_glyph (); }

    hb_codepoint_t get_glyph () const
    {
      switch (format)
      {
      case 1: return u.format1.get_glyph ();
      case 2: return u.format2.get_glyph ();
      default:return 0;
      }
    }
    bool operator != (const iter_t& o) const
    {
      if (format != o.format) return true;
      switch (format)
      {
      case 1: return u.format1 != o.u.format1;
      case 2: return u.format2 != o.u.format2;
      default:return false;
      }
    }

    private:
    unsigned int format;
    union {
      CoverageFormat2::iter_t format2; /* Put this one first since it's larger; helps shut up compiler. */
      CoverageFormat1::iter_t format1;
    } u;
  };
  iter_t iter () const { return iter_t (*this); }

  protected:
  union {
    HBUINT16        format;     /* Format identifier */
    CoverageFormat1 format1;
    CoverageFormat2 format2;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};

template<typename Iterator>
static inline void
Coverage_serialize (hb_serialize_context_t *c,
		    Iterator it)
{ c->start_embed<Coverage> ()->serialize (c, it); }

static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
					  const hb_set_t &glyphset,
					  const hb_map_t &gid_klass_map,
					  hb_sorted_vector_t<HBGlyphID> &glyphs,
					  const hb_set_t &klasses,
					  hb_map_t *klass_map /*INOUT*/)
{
  if (!klass_map)
  {
    ClassDef_serialize (c, hb_zip (glyphs.iter (), + glyphs.iter ()
				   | hb_map (gid_klass_map)));
    return;
  }

  /* Any glyph not assigned a class value falls into class zero (0);
   * if any glyph is assigned class 0, the remapping must start with 0->0. */
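  /* E.g. if the classes in use are {2, 5} and some glyphs remain in
   * class 0, the resulting remap is 0->0, 2->1, 5->2. */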
1659 if (glyphset.get_population () > gid_klass_map.get_population ())
1660 klass_map->set (0, 0);
1661
1662 unsigned idx = klass_map->has (0) ? 1 : 0;
1663 for (const unsigned k: klasses.iter ())
1664 {
1665 if (klass_map->has (k)) continue;
1666 klass_map->set (k, idx);
1667 idx++;
1668 }
1669
1670 auto it =
1671 + glyphs.iter ()
1672 | hb_map_retains_sorting ([&] (const HBGlyphID& gid) -> hb_pair_t<hb_codepoint_t, unsigned>
1673 {
1674 unsigned new_klass = klass_map->get (gid_klass_map[gid]);
1675 return hb_pair ((hb_codepoint_t)gid, new_klass);
1676 })
1677 ;
1678
1679 c->propagate_error (glyphs, klasses);
1680 ClassDef_serialize (c, it);
1681}
1682
1683/*
1684 * Class Definition Table
1685 */
1686
1687struct ClassDefFormat1
1688{
1689 friend struct ClassDef;
1690
1691 private:
1692 unsigned int get_class (hb_codepoint_t glyph_id) const
1693 {
1694 return classValue[(unsigned int) (glyph_id - startGlyph)];
1695 }
1696
1697 template<typename Iterator,
1698 hb_requires (hb_is_iterator (Iterator))>
1699 bool serialize (hb_serialize_context_t *c,
1700 Iterator it)
1701 {
1702 TRACE_SERIALIZE (this);
1703 if (unlikely (!c->extend_min (*this))) return_trace (false);
1704
1705 if (unlikely (!it))
1706 {
1707 startGlyph = 0;
1708 classValue.len = 0;
1709 return_trace (true);
1710 }
1711
1712 hb_codepoint_t glyph_min = (*it).first;
1713 hb_codepoint_t glyph_max = + it
1714 | hb_map (hb_first)
1715 | hb_reduce (hb_max, 0u);
1716 unsigned glyph_count = glyph_max - glyph_min + 1;
1717
1718 startGlyph = glyph_min;
1719 if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
1720 for (const hb_pair_t<hb_codepoint_t, unsigned> gid_klass_pair : + it)
1721 {
1722 unsigned idx = gid_klass_pair.first - glyph_min;
1723 classValue[idx] = gid_klass_pair.second;
1724 }
1725 return_trace (true);
1726 }
1727
1728 bool subset (hb_subset_context_t *c,
1729 hb_map_t *klass_map = nullptr /*OUT*/) const
1730 {
1731 TRACE_SUBSET (this);
1732 const hb_set_t &glyphset = *c->plan->_glyphset_gsub;
1733 const hb_map_t &glyph_map = *c->plan->glyph_map;
1734
1735 hb_sorted_vector_t<HBGlyphID> glyphs;
1736 hb_set_t orig_klasses;
1737 hb_map_t gid_org_klass_map;
1738
1739 hb_codepoint_t start = startGlyph;
1740 hb_codepoint_t end = start + classValue.len;
1741 for (const hb_codepoint_t gid : + hb_range (start, end)
1742 | hb_filter (glyphset))
1743 {
1744 unsigned klass = classValue[gid - start];
1745 if (!klass) continue;
1746
1747 glyphs.push (glyph_map[gid]);
1748 gid_org_klass_map.set (glyph_map[gid], klass);
1749 orig_klasses.add (klass);
1750 }
1751
1752 ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map,
1753 glyphs, orig_klasses, klass_map);
1754 return_trace ((bool) glyphs);
1755 }
1756
1757 bool sanitize (hb_sanitize_context_t *c) const
1758 {
1759 TRACE_SANITIZE (this);
1760 return_trace (c->check_struct (this) && classValue.sanitize (c));
1761 }
1762
1763 template <typename set_t>
1764 bool collect_coverage (set_t *glyphs) const
1765 {
1766 unsigned int start = 0;
1767 unsigned int count = classValue.len;
1768 for (unsigned int i = 0; i < count; i++)
1769 {
1770 if (classValue[i])
1771 continue;
1772
1773 if (start != i)
1774 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
1775 return false;
1776
1777 start = i + 1;
1778 }
1779 if (start != count)
1780 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
1781 return false;
1782
1783 return true;
1784 }
1785
1786 template <typename set_t>
1787 bool collect_class (set_t *glyphs, unsigned int klass) const
1788 {
1789 unsigned int count = classValue.len;
1790 for (unsigned int i = 0; i < count; i++)
1791 if (classValue[i] == klass) glyphs->add (startGlyph + i);
1792 return true;
1793 }
1794
1795 bool intersects (const hb_set_t *glyphs) const
1796 {
1797 /* TODO Speed up, using hb_set_next()? */
1798 hb_codepoint_t start = startGlyph;
1799 hb_codepoint_t end = startGlyph + classValue.len;
1800 for (hb_codepoint_t iter = startGlyph - 1;
1801 hb_set_next (glyphs, &iter) && iter < end;)
1802 if (classValue[iter - start]) return true;
1803 return false;
1804 }
1805 bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
1806 {
1807 unsigned int count = classValue.len;
1808 if (klass == 0)
1809 {
1810 /* Match if there's any glyph that is not listed! */
1811 hb_codepoint_t g = HB_SET_VALUE_INVALID;
1812 if (!hb_set_next (glyphs, &g)) return false;
1813 if (g < startGlyph) return true;
1814 g = startGlyph + count - 1;
1815 if (hb_set_next (glyphs, &g)) return true;
1816 /* Fall through. */
1817 }
1818 for (unsigned int i = 0; i < count; i++)
1819 if (classValue[i] == klass && glyphs->has (startGlyph + i))
1820 return true;
1821 return false;
1822 }
1823
1824 protected:
1825 HBUINT16 classFormat; /* Format identifier--format = 1 */
1826 HBGlyphID startGlyph; /* First GlyphID of the classValueArray */
1827 ArrayOf<HBUINT16>
1828 classValue; /* Array of Class Values--one per GlyphID */
1829 public:
1830 DEFINE_SIZE_ARRAY (6, classValue);
1831};

struct ClassDefFormat2
{
  friend struct ClassDef;

  private:
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    return rangeRecord.bsearch (glyph_id).value;
  }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c,
                  Iterator it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);

    if (unlikely (!it))
    {
      rangeRecord.len = 0;
      return_trace (true);
    }

    unsigned num_ranges = 1;
    hb_codepoint_t prev_gid = (*it).first;
    unsigned prev_klass = (*it).second;

    RangeRecord range_rec;
    range_rec.first = prev_gid;
    range_rec.last = prev_gid;
    range_rec.value = prev_klass;

    RangeRecord *record = c->copy (range_rec);
    if (unlikely (!record)) return_trace (false);

    for (const auto gid_klass_pair : + (++it))
    {
      hb_codepoint_t cur_gid = gid_klass_pair.first;
      unsigned cur_klass = gid_klass_pair.second;

      if (cur_gid != prev_gid + 1 ||
          cur_klass != prev_klass)
      {
        if (unlikely (!record)) break;
        record->last = prev_gid;
        num_ranges++;

        range_rec.first = cur_gid;
        range_rec.last = cur_gid;
        range_rec.value = cur_klass;

        record = c->copy (range_rec);
      }

      prev_klass = cur_klass;
      prev_gid = cur_gid;
    }

    if (likely (record)) record->last = prev_gid;
    rangeRecord.len = num_ranges;
    return_trace (true);
  }

  bool subset (hb_subset_context_t *c,
               hb_map_t *klass_map = nullptr /*OUT*/) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->_glyphset_gsub;
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    hb_sorted_vector_t<HBGlyphID> glyphs;
    hb_set_t orig_klasses;
    hb_map_t gid_org_klass_map;

    unsigned count = rangeRecord.len;
    for (unsigned i = 0; i < count; i++)
    {
      unsigned klass = rangeRecord[i].value;
      if (!klass) continue;
      hb_codepoint_t start = rangeRecord[i].first;
      hb_codepoint_t end = rangeRecord[i].last + 1;
      for (hb_codepoint_t g = start; g < end; g++)
      {
        if (!glyphset.has (g)) continue;
        glyphs.push (glyph_map[g]);
        gid_org_klass_map.set (glyph_map[g], klass);
        orig_klasses.add (klass);
      }
    }

    ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map,
                                  glyphs, orig_klasses, klass_map);
    return_trace ((bool) glyphs);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rangeRecord.sanitize (c));
  }

  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
      if (rangeRecord[i].value)
        if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
          return false;
    return true;
  }

  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (rangeRecord[i].value == klass)
        if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
          return false;
    }
    return true;
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    /* TODO Speed up, using hb_set_next() and bsearch()? */
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
      if (rangeRecord[i].intersects (glyphs))
        return true;
    return false;
  }
  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
  {
    unsigned int count = rangeRecord.len;
    if (klass == 0)
    {
      /* Match if there's any glyph that is not listed! */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      for (unsigned int i = 0; i < count; i++)
      {
        if (!hb_set_next (glyphs, &g))
          break;
        if (g < rangeRecord[i].first)
          return true;
        g = rangeRecord[i].last;
      }
      if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
        return true;
      /* Fall through. */
    }
    for (unsigned int i = 0; i < count; i++)
      if (rangeRecord[i].value == klass && rangeRecord[i].intersects (glyphs))
        return true;
    return false;
  }

  protected:
  HBUINT16      classFormat;    /* Format identifier--format = 2 */
  SortedArrayOf<RangeRecord>
                rangeRecord;    /* Array of glyph ranges--ordered by
                                 * Start GlyphID */
  public:
  DEFINE_SIZE_ARRAY (4, rangeRecord);
};
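
/*
 * Illustrative sketch (hypothetical values): Format 2 stores
 * (first, last, value) ranges sorted by first glyph, and get_class()
 * binary-searches them.  With ranges {10..19 -> 1, 30..30 -> 2}:
 *
 *   get_class (15) == 1   // inside the first range
 *   get_class (30) == 2
 *   get_class (25) == 0   // bsearch miss yields the Null record, value 0
 */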

struct ClassDef
{
  /* Has interface. */
  static constexpr unsigned SENTINEL = 0;
  typedef unsigned int value_t;
  value_t operator [] (hb_codepoint_t k) const { return get (k); }
  bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
  /* Projection. */
  hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }

  unsigned int get (hb_codepoint_t k) const { return get_class (k); }
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    switch (u.format) {
    case 1: return u.format1.get_class (glyph_id);
    case 2: return u.format2.get_class (glyph_id);
    default:return 0;
    }
  }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c, Iterator it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);

    unsigned format = 2;
    if (likely (it))
    {
      hb_codepoint_t glyph_min = (*it).first;
      hb_codepoint_t glyph_max = + it
                                 | hb_map (hb_first)
                                 | hb_reduce (hb_max, 0u);

      unsigned num_ranges = 1;
      hb_codepoint_t prev_gid = glyph_min;
      unsigned prev_klass = (*it).second;

      for (const auto gid_klass_pair : it)
      {
        hb_codepoint_t cur_gid = gid_klass_pair.first;
        unsigned cur_klass = gid_klass_pair.second;
        if (cur_gid == glyph_min || !cur_klass) continue;
        if (cur_gid != prev_gid + 1 ||
            cur_klass != prev_klass)
          num_ranges++;

        prev_gid = cur_gid;
        prev_klass = cur_klass;
      }

      if (1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
        format = 1;
    }
    u.format = format;

    switch (u.format)
    {
    case 1: return_trace (u.format1.serialize (c, it));
    case 2: return_trace (u.format2.serialize (c, it));
    default:return_trace (false);
    }
  }

  bool subset (hb_subset_context_t *c,
               hb_map_t *klass_map = nullptr /*OUT*/) const
  {
    TRACE_SUBSET (this);
    switch (u.format) {
    case 1: return_trace (u.format1.subset (c, klass_map));
    case 2: return_trace (u.format2.subset (c, klass_map));
    default:return_trace (false);
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    case 2: return_trace (u.format2.sanitize (c));
    default:return_trace (true);
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.collect_coverage (glyphs);
    case 2: return u.format2.collect_coverage (glyphs);
    default:return false;
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: return u.format1.collect_class (glyphs, klass);
    case 2: return u.format2.collect_class (glyphs, klass);
    default:return false;
    }
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.intersects (glyphs);
    case 2: return u.format2.intersects (glyphs);
    default:return false;
    }
  }
  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: return u.format1.intersects_class (glyphs, klass);
    case 2: return u.format2.intersects_class (glyphs, klass);
    default:return false;
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  ClassDefFormat1       format1;
  ClassDefFormat2       format2;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};

template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
                                       Iterator it)
{ c->start_embed<ClassDef> ()->serialize (c, it); }
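
/*
 * Illustrative note on the format heuristic in ClassDef::serialize() above
 * (numbers are hypothetical): a Format 1 body costs roughly one HBUINT16
 * per glyph in [glyph_min, glyph_max] plus startGlyph, while Format 2
 * costs three HBUINT16s per range, hence the comparison
 * 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3.  One class covering
 * 100 consecutive glyphs gives num_ranges = 1 and 101 > 3, so Format 2 is
 * chosen; 100 glyphs alternating between two classes give num_ranges = 100
 * and 101 <= 300, so Format 1 is chosen.
 */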


/*
 * Item Variation Store
 */

struct VarRegionAxis
{
  float evaluate (int coord) const
  {
    int start = startCoord, peak = peakCoord, end = endCoord;

    /* TODO Move these to sanitize(). */
    if (unlikely (start > peak || peak > end))
      return 1.;
    if (unlikely (start < 0 && end > 0 && peak != 0))
      return 1.;

    if (peak == 0 || coord == peak)
      return 1.;

    if (coord <= start || end <= coord)
      return 0.;

    /* Interpolate */
    if (coord < peak)
      return float (coord - start) / (peak - start);
    else
      return float (end - coord) / (end - peak);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
    /* TODO Handle invalid start/peak/end configs, so we don't
     * have to do that at runtime. */
  }

  public:
  F2DOT14       startCoord;
  F2DOT14       peakCoord;
  F2DOT14       endCoord;
  public:
  DEFINE_SIZE_STATIC (6);
};
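
/*
 * Worked example (illustrative coordinates, shown as floats for
 * readability even though evaluate() operates on raw 2.14 fixed-point
 * ints): with startCoord = 0, peakCoord = 0.5, endCoord = 1.0 the axis
 * contributes a triangular "tent" scalar:
 *
 *   evaluate (0.5)  -> 1.0   // at the peak
 *   evaluate (0.25) -> 0.5   // rising edge: (0.25 - 0) / (0.5 - 0)
 *   evaluate (0.75) -> 0.5   // falling edge: (1.0 - 0.75) / (1.0 - 0.5)
 *   evaluate (0.0)  -> 0.0   // at or outside [start, end]
 */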

struct VarRegionList
{
  float evaluate (unsigned int region_index,
                  const int *coords, unsigned int coord_len) const
  {
    if (unlikely (region_index >= regionCount))
      return 0.;

    const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);

    float v = 1.;
    unsigned int count = axisCount;
    for (unsigned int i = 0; i < count; i++)
    {
      int coord = i < coord_len ? coords[i] : 0;
      float factor = axes[i].evaluate (coord);
      if (factor == 0.f)
        return 0.;
      v *= factor;
    }
    return v;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  axesZ.sanitize (c, (unsigned int) axisCount * (unsigned int) regionCount));
  }

  bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    VarRegionList *out = c->allocate_min<VarRegionList> ();
    if (unlikely (!out)) return_trace (false);
    axisCount = src->axisCount;
    regionCount = region_map.get_population ();
    if (unlikely (!c->allocate_size<VarRegionList> (get_size () - min_size))) return_trace (false);
    unsigned int region_count = src->get_region_count ();
    for (unsigned int r = 0; r < regionCount; r++)
    {
      unsigned int backward = region_map.backward (r);
      if (backward >= region_count) return_trace (false);
      memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
    }

    return_trace (true);
  }

  unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
  unsigned int get_region_count () const { return regionCount; }

  protected:
  HBUINT16      axisCount;
  HBUINT16      regionCount;
  UnsizedArrayOf<VarRegionAxis>
                axesZ;
  public:
  DEFINE_SIZE_ARRAY (4, axesZ);
};
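
/*
 * Illustrative note (axis values are hypothetical): a region's scalar is
 * the product of its per-axis tent values, so a two-axis region evaluating
 * to 0.5 on one axis and 0.8 on the other contributes 0.4, and any axis
 * evaluating to 0 zeroes the whole region (hence the early return above).
 * Axes beyond coord_len are read as coordinate 0, i.e. the default
 * position.
 */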

struct VarData
{
  unsigned int get_region_index_count () const
  { return regionIndices.len; }

  unsigned int get_row_size () const
  { return shortCount + regionIndices.len; }

  unsigned int get_size () const
  { return itemCount * get_row_size (); }

  float get_delta (unsigned int inner,
                   const int *coords, unsigned int coord_count,
                   const VarRegionList &regions) const
  {
    if (unlikely (inner >= itemCount))
      return 0.;

    unsigned int count = regionIndices.len;
    unsigned int scount = shortCount;

    const HBUINT8 *bytes = get_delta_bytes ();
    const HBUINT8 *row = bytes + inner * (scount + count);

    float delta = 0.;
    unsigned int i = 0;

    const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (row);
    for (; i < scount; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
      delta += scalar * *scursor++;
    }
    const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
    for (; i < count; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
      delta += scalar * *bcursor++;
    }

    return delta;
  }

  void get_scalars (const int *coords, unsigned int coord_count,
                    const VarRegionList &regions,
                    float *scalars /*OUT*/,
                    unsigned int num_scalars) const
  {
    unsigned count = hb_min (num_scalars, regionIndices.len);
    for (unsigned int i = 0; i < count; i++)
      scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
    for (unsigned int i = count; i < num_scalars; i++)
      scalars[i] = 0.f;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  regionIndices.sanitize (c) &&
                  shortCount <= regionIndices.len &&
                  c->check_range (get_delta_bytes (),
                                  itemCount,
                                  get_row_size ()));
  }

  bool serialize (hb_serialize_context_t *c,
                  const VarData *src,
                  const hb_inc_bimap_t &inner_map,
                  const hb_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    itemCount = inner_map.get_next_value ();

    /* Optimize short count */
    unsigned short ri_count = src->regionIndices.len;
    enum delta_size_t { kZero=0, kByte, kShort };
    hb_vector_t<delta_size_t> delta_sz;
    hb_vector_t<unsigned int> ri_map;   /* maps old index to new index */
    delta_sz.resize (ri_count);
    ri_map.resize (ri_count);
    unsigned int new_short_count = 0;
    unsigned int r;
    for (r = 0; r < ri_count; r++)
    {
      delta_sz[r] = kZero;
      for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
      {
        unsigned int old = inner_map.backward (i);
        int16_t delta = src->get_item_delta (old, r);
        if (delta < -128 || 127 < delta)
        {
          delta_sz[r] = kShort;
          new_short_count++;
          break;
        }
        else if (delta != 0)
          delta_sz[r] = kByte;
      }
    }
    unsigned int short_index = 0;
    unsigned int byte_index = new_short_count;
    unsigned int new_ri_count = 0;
    for (r = 0; r < ri_count; r++)
      if (delta_sz[r])
      {
        ri_map[r] = (delta_sz[r] == kShort)? short_index++ : byte_index++;
        new_ri_count++;
      }

    shortCount = new_short_count;
    regionIndices.len = new_ri_count;

    unsigned int size = regionIndices.get_size () - HBUINT16::static_size/*regionIndices.len*/ + (get_row_size () * itemCount);
    if (unlikely (!c->allocate_size<HBUINT8> (size)))
      return_trace (false);

    for (r = 0; r < ri_count; r++)
      if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];

    for (unsigned int i = 0; i < itemCount; i++)
    {
      unsigned int old = inner_map.backward (i);
      for (unsigned int r = 0; r < ri_count; r++)
        if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
    }

    return_trace (true);
  }

  void collect_region_refs (hb_inc_bimap_t &region_map, const hb_inc_bimap_t &inner_map) const
  {
    for (unsigned int r = 0; r < regionIndices.len; r++)
    {
      unsigned int region = regionIndices[r];
      if (region_map.has (region)) continue;
      for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
        if (get_item_delta (inner_map.backward (i), r) != 0)
        {
          region_map.add (region);
          break;
        }
    }
  }

  protected:
  const HBUINT8 *get_delta_bytes () const
  { return &StructAfter<HBUINT8> (regionIndices); }

  HBUINT8 *get_delta_bytes ()
  { return &StructAfter<HBUINT8> (regionIndices); }

  int16_t get_item_delta (unsigned int item, unsigned int region) const
  {
    if (item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
    const HBINT8 *p = (const HBINT8 *) get_delta_bytes () + item * get_row_size ();
    if (region < shortCount)
      return ((const HBINT16 *) p)[region];
    else
      return (p + HBINT16::static_size * shortCount)[region - shortCount];
  }

  void set_item_delta (unsigned int item, unsigned int region, int16_t delta)
  {
    HBINT8 *p = (HBINT8 *) get_delta_bytes () + item * get_row_size ();
    if (region < shortCount)
      ((HBINT16 *) p)[region] = delta;
    else
      (p + HBINT16::static_size * shortCount)[region - shortCount] = delta;
  }

  protected:
  HBUINT16              itemCount;
  HBUINT16              shortCount;
  ArrayOf<HBUINT16>     regionIndices;
/*UnsizedArrayOf<HBUINT8>bytesX;*/
  public:
  DEFINE_SIZE_ARRAY (6, regionIndices);
};
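
/*
 * Illustrative sketch of the row layout behind get_delta() (hypothetical
 * values): each of itemCount rows holds shortCount 16-bit deltas followed
 * by (regionIndices.len - shortCount) 8-bit deltas.  With
 * regionIndices = {3, 7, 1} and shortCount = 1, a row occupies
 * 2 + 1 + 1 = 4 bytes:
 *
 *   [HBINT16 delta, region 3][HBINT8 delta, region 7][HBINT8 delta, region 1]
 *
 * and the interpolated value is the sum of each delta times its region's
 * scalar from VarRegionList::evaluate().
 */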

struct VariationStore
{
  float get_delta (unsigned int outer, unsigned int inner,
                   const int *coords, unsigned int coord_count) const
  {
#ifdef HB_NO_VAR
    return 0.f;
#endif

    if (unlikely (outer >= dataSets.len))
      return 0.f;

    return (this+dataSets[outer]).get_delta (inner,
                                             coords, coord_count,
                                             this+regions);
  }

  float get_delta (unsigned int index,
                   const int *coords, unsigned int coord_count) const
  {
    unsigned int outer = index >> 16;
    unsigned int inner = index & 0xFFFF;
    return get_delta (outer, inner, coords, coord_count);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
#ifdef HB_NO_VAR
    return true;
#endif

    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  format == 1 &&
                  regions.sanitize (c, this) &&
                  dataSets.sanitize (c, this));
  }

  bool serialize (hb_serialize_context_t *c,
                  const VariationStore *src,
                  const hb_array_t <hb_inc_bimap_t> &inner_maps)
  {
    TRACE_SERIALIZE (this);
    unsigned int set_count = 0;
    for (unsigned int i = 0; i < inner_maps.length; i++)
      if (inner_maps[i].get_population () > 0) set_count++;

    unsigned int size = min_size + HBUINT32::static_size * set_count;
    if (unlikely (!c->allocate_size<HBUINT32> (size))) return_trace (false);
    format = 1;

    hb_inc_bimap_t region_map;
    for (unsigned int i = 0; i < inner_maps.length; i++)
      (src+src->dataSets[i]).collect_region_refs (region_map, inner_maps[i]);
    region_map.sort ();

    if (unlikely (!regions.serialize (c, this)
                  .serialize (c, &(src+src->regions), region_map))) return_trace (false);

    /* TODO: The following code could be simplified when
     * OffsetListOf::subset () can take a custom param to be passed to VarData::serialize ()
     */
    dataSets.len = set_count;
    unsigned int set_index = 0;
    for (unsigned int i = 0; i < inner_maps.length; i++)
    {
      if (inner_maps[i].get_population () == 0) continue;
      if (unlikely (!dataSets[set_index++].serialize (c, this)
                    .serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
        return_trace (false);
    }

    return_trace (true);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> ();
    if (unlikely (!varstore_prime)) return_trace (false);

    const hb_set_t *variation_indices = c->plan->layout_variation_indices;
    if (variation_indices->is_empty ()) return_trace (false);

    hb_vector_t<hb_inc_bimap_t> inner_maps;
    inner_maps.resize ((unsigned) dataSets.len);
    for (unsigned i = 0; i < inner_maps.length; i++)
      inner_maps[i].init ();

    for (unsigned idx : c->plan->layout_variation_indices->iter ())
    {
      uint16_t major = idx >> 16;
      uint16_t minor = idx & 0xFFFF;

      if (major >= inner_maps.length)
      {
        for (unsigned i = 0; i < inner_maps.length; i++)
          inner_maps[i].fini ();
        return_trace (false);
      }
      inner_maps[major].add (minor);
    }
    varstore_prime->serialize (c->serializer, this, inner_maps.as_array ());

    for (unsigned i = 0; i < inner_maps.length; i++)
      inner_maps[i].fini ();
    return_trace (bool (varstore_prime->dataSets));
  }

  unsigned int get_region_index_count (unsigned int ivs) const
  { return (this+dataSets[ivs]).get_region_index_count (); }

  void get_scalars (unsigned int ivs,
                    const int *coords, unsigned int coord_count,
                    float *scalars /*OUT*/,
                    unsigned int num_scalars) const
  {
#ifdef HB_NO_VAR
    for (unsigned i = 0; i < num_scalars; i++)
      scalars[i] = 0.f;
    return;
#endif

    (this+dataSets[ivs]).get_scalars (coords, coord_count, this+regions,
                                      &scalars[0], num_scalars);
  }

  unsigned int get_sub_table_count () const { return dataSets.len; }

  protected:
  HBUINT16                      format;
  LOffsetTo<VarRegionList>      regions;
  LOffsetArrayOf<VarData>       dataSets;
  public:
  DEFINE_SIZE_ARRAY (8, dataSets);
};
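
/*
 * Illustrative sketch: a 32-bit variation index packs the VarData ("outer")
 * index in its high 16 bits and the row ("inner") index in its low 16
 * bits, matching the split in get_delta (index, ...) above:
 *
 *   unsigned idx   = 0x00030002u;   // hypothetical index
 *   unsigned outer = idx >> 16;     // 3: fourth VarData subtable
 *   unsigned inner = idx & 0xFFFF;  // 2: third row of that subtable
 */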

/*
 * Feature Variations
 */

struct ConditionFormat1
{
  friend struct Condition;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);
    return_trace (true);
  }

  private:
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
    return filterRangeMinValue <= coord && coord <= filterRangeMaxValue;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  protected:
  HBUINT16      format;         /* Format identifier--format = 1 */
  HBUINT16      axisIndex;
  F2DOT14       filterRangeMinValue;
  F2DOT14       filterRangeMaxValue;
  public:
  DEFINE_SIZE_STATIC (8);
};
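
/*
 * Illustrative sketch (hypothetical values): ConditionFormat1 tests one
 * normalized axis coordinate against the inclusive range
 * [filterRangeMinValue, filterRangeMaxValue].  With axisIndex = 0,
 * min = 0.5 and max = 1.0 (F2DOT14), a normalized coordinate of
 * 0.75 * 16384 evaluates true and 0 evaluates false; an axis missing from
 * coords (axisIndex >= coord_len) is read as 0, the default position.
 */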

struct Condition
{
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    switch (u.format) {
    case 1: return u.format1.evaluate (coords, coord_len);
    default:return false;
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    default:return_trace (true);
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  ConditionFormat1      format1;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};

struct ConditionSet
{
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    unsigned int count = conditions.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
        return false;
    return true;
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    + conditions.iter ()
    | hb_apply (subset_offset_array (c, out->conditions, this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, this));
  }

  protected:
  LOffsetArrayOf<Condition> conditions;
  public:
  DEFINE_SIZE_ARRAY (2, conditions);
};
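
/*
 * Note: the conditions above are ANDed, so an empty ConditionSet evaluates
 * to true and its FeatureVariationRecord matches unconditionally.
 */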

struct FeatureTableSubstitutionRecord
{
  friend struct FeatureTableSubstitution;

  void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+feature).add_lookup_indexes_to (lookup_indexes);
  }

  void closure_features (const void *base,
                         const hb_map_t *lookup_indexes,
                         hb_set_t *feature_indexes /* OUT */) const
  {
    if ((base+feature).intersects_lookup_indexes (lookup_indexes))
      feature_indexes->add (featureIndex);
  }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->featureIndex = c->feature_index_map->get (featureIndex);
    bool ret = out->feature.serialize_subset (c->subset_context, feature, base, c);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && feature.sanitize (c, base));
  }

  protected:
  HBUINT16              featureIndex;
  LOffsetTo<Feature>    feature;
  public:
  DEFINE_SIZE_STATIC (6);
};

struct FeatureTableSubstitution
{
  const Feature *find_substitute (unsigned int feature_index) const
  {
    unsigned int count = substitutions.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
      if (record.featureIndex == feature_index)
        return &(this+record.feature);
    }
    return nullptr;
  }

  void collect_lookups (const hb_set_t *feature_indexes,
                        hb_set_t *lookup_indexes /* OUT */) const
  {
    + hb_iter (substitutions)
    | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
    | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
                { r.collect_lookups (this, lookup_indexes); })
    ;
  }

  void closure_features (const hb_map_t *lookup_indexes,
                         hb_set_t *feature_indexes /* OUT */) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
      record.closure_features (this, lookup_indexes, feature_indexes);
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    + substitutions.iter ()
    | hb_apply (subset_record_array (l, &(out->substitutions), this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
                  likely (version.major == 1) &&
                  substitutions.sanitize (c, this));
  }

  protected:
  FixedVersion<>        version;        /* Version--0x00010000u */
  ArrayOf<FeatureTableSubstitutionRecord>
                        substitutions;
  public:
  DEFINE_SIZE_ARRAY (6, substitutions);
};

struct FeatureVariationRecord
{
  friend struct FeatureVariations;

  void collect_lookups (const void *base,
                        const hb_set_t *feature_indexes,
                        hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
  }

  void closure_features (const void *base,
                         const hb_map_t *lookup_indexes,
                         hb_set_t *feature_indexes /* OUT */) const
  {
    (base+substitutions).closure_features (lookup_indexes, feature_indexes);
  }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->conditions.serialize_subset (c->subset_context, conditions, base);
    out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, base) &&
                  substitutions.sanitize (c, base));
  }

  protected:
  LOffsetTo<ConditionSet>
                        conditions;
  LOffsetTo<FeatureTableSubstitution>
                        substitutions;
  public:
  DEFINE_SIZE_STATIC (8);
};

struct FeatureVariations
{
  static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;

  bool find_index (const int *coords, unsigned int coord_len,
                   unsigned int *index) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureVariationRecord &record = varRecords.arrayZ[i];
      if ((this+record.conditions).evaluate (coords, coord_len))
      {
        *index = i;
        return true;
      }
    }
    *index = NOT_FOUND_INDEX;
    return false;
  }

  const Feature *find_substitute (unsigned int variations_index,
                                  unsigned int feature_index) const
  {
    const FeatureVariationRecord &record = varRecords[variations_index];
    return (this+record.substitutions).find_substitute (feature_index);
  }

  FeatureVariations* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  void collect_lookups (const hb_set_t *feature_indexes,
                        hb_set_t *lookup_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& r : varRecords)
      r.collect_lookups (this, feature_indexes, lookup_indexes);
  }

  void closure_features (const hb_map_t *lookup_indexes,
                         hb_set_t *feature_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& record : varRecords)
      record.closure_features (this, lookup_indexes, feature_indexes);
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    + varRecords.iter ()
    | hb_apply (subset_record_array (l, &(out->varRecords), this))
    ;
    return_trace (bool (out->varRecords));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
                  likely (version.major == 1) &&
                  varRecords.sanitize (c, this));
  }

  protected:
  FixedVersion<>        version;        /* Version--0x00010000u */
  LArrayOf<FeatureVariationRecord>
                        varRecords;
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
};
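
/*
 * Illustrative usage sketch (variable names are hypothetical): callers take
 * the first record whose ConditionSet matches the current normalized
 * coordinates, then substitute features through it:
 *
 *   unsigned index;
 *   if (feature_variations.find_index (coords, num_coords, &index))
 *   {
 *     const Feature *f = feature_variations.find_substitute (index, feature_index);
 *     if (f) feature = f;   // use the substituted feature table
 *   }
 */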


/*
 * Device Tables
 */

struct HintingDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font) const
  { return get_delta (font->x_ppem, font->x_scale); }

  hb_position_t get_y_delta (hb_font_t *font) const
  { return get_delta (font->y_ppem, font->y_scale); }

  public:

  unsigned int get_size () const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
    return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
  }

  HintingDevice* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed<HintingDevice> (this));
  }

  private:

  int get_delta (unsigned int ppem, int scale) const
  {
    if (!ppem) return 0;

    int pixels = get_delta_pixels (ppem);

    if (!pixels) return 0;

    return (int) (pixels * (int64_t) scale / ppem);
  }
  int get_delta_pixels (unsigned int ppem_size) const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3))
      return 0;

    if (ppem_size < startSize || ppem_size > endSize)
      return 0;

    unsigned int s = ppem_size - startSize;

    unsigned int byte = deltaValueZ[s >> (4 - f)];
    unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
    unsigned int mask = (0xFFFFu >> (16 - (1 << f)));

    int delta = bits & mask;

    if ((unsigned int) delta >= ((mask + 1) >> 1))
      delta -= mask + 1;

    return delta;
  }

  protected:
  HBUINT16      startSize;      /* Smallest size to correct--in ppem */
  HBUINT16      endSize;        /* Largest size to correct--in ppem */
  HBUINT16      deltaFormat;    /* Format of DeltaValue array data: 1, 2, or 3
                                 * 1    Signed 2-bit value, 8 values per uint16
                                 * 2    Signed 4-bit value, 4 values per uint16
                                 * 3    Signed 8-bit value, 2 values per uint16
                                 */
  UnsizedArrayOf<HBUINT16>
                deltaValueZ;    /* Array of compressed data */
  public:
  DEFINE_SIZE_ARRAY (6, deltaValueZ);
};
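
/*
 * Worked example (hypothetical data): with deltaFormat = 2 (signed 4-bit
 * values, four per uint16), startSize = 12 and deltaValueZ[0] = 0x1F20,
 * get_delta_pixels() decodes ppem sizes 12..15 to the nibbles
 * 0x1, 0xF, 0x2, 0x0, sign-extended to +1, -1, +2, 0; get_delta() then
 * scales the chosen pixel delta to font units as pixels * scale / ppem.
 */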

struct VariationDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_x (get_delta (font, store)); }

  hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_y (get_delta (font, store)); }

  VariationDevice* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const
  {
    TRACE_SERIALIZE (this);
    auto snap = c->snapshot ();
    auto *out = c->embed (this);
    if (unlikely (!out)) return_trace (nullptr);
    if (!layout_variation_idx_map || layout_variation_idx_map->is_empty ()) return_trace (out);

    unsigned org_idx = (outerIndex << 16) + innerIndex;
    if (!layout_variation_idx_map->has (org_idx))
    {
      c->revert (snap);
      return_trace (nullptr);
    }
    unsigned new_idx = layout_variation_idx_map->get (org_idx);
    out->outerIndex = new_idx >> 16;
    out->innerIndex = new_idx & 0xFFFF;
    return_trace (out);
  }

  void record_variation_index (hb_set_t *layout_variation_indices) const
  {
    unsigned var_idx = (outerIndex << 16) + innerIndex;
    layout_variation_indices->add (var_idx);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  private:

  float get_delta (hb_font_t *font, const VariationStore &store) const
  {
    return store.get_delta (outerIndex, innerIndex, font->coords, font->num_coords);
  }

  protected:
  HBUINT16      outerIndex;
  HBUINT16      innerIndex;
  HBUINT16      deltaFormat;    /* Format identifier for this table: 0x8000 */
  public:
  DEFINE_SIZE_STATIC (6);
};
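
/*
 * Note: (outerIndex << 16) + innerIndex above reconstructs the same packed
 * 32-bit index consumed by VariationStore::get_delta(), which is why
 * subsetting can remap it through layout_variation_idx_map as a single key.
 */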

struct DeviceHeader
{
  protected:
  HBUINT16      reserved1;
  HBUINT16      reserved2;
  public:
  HBUINT16      format;         /* Format identifier */
  public:
  DEFINE_SIZE_STATIC (6);
};

struct Device
{
  hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
  {
    switch (u.b.format)
    {
#ifndef HB_NO_HINTING
    case 1: case 2: case 3:
      return u.hinting.get_x_delta (font);
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      return u.variation.get_x_delta (font, store);
#endif
    default:
      return 0;
    }
  }
  hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
  {
    switch (u.b.format)
    {
#ifndef HB_NO_HINTING
    case 1: case 2: case 3:
      return u.hinting.get_y_delta (font);
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      return u.variation.get_y_delta (font, store);
#endif
    default:
      return 0;
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.b.format.sanitize (c)) return_trace (false);
    switch (u.b.format) {
#ifndef HB_NO_HINTING
    case 1: case 2: case 3:
      return_trace (u.hinting.sanitize (c));
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      return_trace (u.variation.sanitize (c));
#endif
    default:
      return_trace (true);
    }
  }

  Device* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map=nullptr) const
  {
    TRACE_SERIALIZE (this);
    switch (u.b.format) {
#ifndef HB_NO_HINTING
    case 1:
    case 2:
    case 3:
      return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_map)));
#endif
    default:
      return_trace (nullptr);
    }
  }

  void collect_variation_indices (hb_set_t *layout_variation_indices) const
  {
    switch (u.b.format) {
#ifndef HB_NO_HINTING
    case 1:
    case 2:
    case 3:
      return;
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      u.variation.record_variation_index (layout_variation_indices);
      return;
#endif
    default:
      return;
    }
  }

  protected:
  union {
  DeviceHeader          b;
  HintingDevice         hinting;
#ifndef HB_NO_VAR
  VariationDevice       variation;
#endif
  } u;
  public:
  DEFINE_SIZE_UNION (6, b);
};
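
/*
 * Illustrative sketch: Device is a tagged union discriminated by the
 * format field at byte offset 4; formats 1..3 select the ppem-keyed
 * HintingDevice and 0x8000 the VariationDevice, so a typical call site is
 * simply (names hypothetical):
 *
 *   hb_position_t dx = device.get_x_delta (font, var_store);
 */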


} /* namespace OT */


#endif /* HB_OT_LAYOUT_COMMON_HH */