1 | /* |
2 | * Copyright © 2007,2008,2009 Red Hat, Inc. |
3 | * Copyright © 2010,2012 Google, Inc. |
4 | * |
5 | * This is part of HarfBuzz, a text shaping library. |
6 | * |
7 | * Permission is hereby granted, without written agreement and without |
8 | * license or royalty fees, to use, copy, modify, and distribute this |
9 | * software and its documentation for any purpose, provided that the |
10 | * above copyright notice and the following two paragraphs appear in |
11 | * all copies of this software. |
12 | * |
13 | * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
14 | * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
15 | * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
16 | * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
17 | * DAMAGE. |
18 | * |
19 | * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
20 | * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
21 | * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
22 | * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
23 | * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
24 | * |
25 | * Red Hat Author(s): Behdad Esfahbod |
26 | * Google Author(s): Behdad Esfahbod |
27 | */ |
28 | |
29 | #ifndef HB_OT_LAYOUT_COMMON_HH |
30 | #define HB_OT_LAYOUT_COMMON_HH |
31 | |
32 | #include "hb.hh" |
33 | #include "hb-ot-layout.hh" |
34 | #include "hb-open-type.hh" |
35 | #include "hb-set.hh" |
36 | #include "hb-bimap.hh" |
37 | |
38 | #include "OT/Layout/Common/Coverage.hh" |
39 | #include "OT/Layout/types.hh" |
40 | |
41 | // TODO(garretrieger): cleanup these after migration. |
42 | using OT::Layout::Common::Coverage; |
43 | using OT::Layout::Common::RangeRecord; |
44 | using OT::Layout::SmallTypes; |
45 | using OT::Layout::MediumTypes; |
46 | |
47 | |
48 | namespace OT { |
49 | |
/* Forward declarations for ClassDef helpers; definitions appear later in
 * this file (used before ClassDef itself is declared). */

template<typename Iterator>
static inline bool ClassDef_serialize (hb_serialize_context_t *c,
				       Iterator it);

static bool ClassDef_remap_and_serialize (
    hb_serialize_context_t *c,
    const hb_set_t &klasses,
    bool use_class_zero,
    hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */
    hb_map_t *klass_map /*IN/OUT*/);
60 | |
/* Context used while collecting feature substitutions that apply under a
 * pinned variation-axis configuration (partial instancing during subsetting). */
struct hb_collect_feature_substitutes_with_var_context_t
{
  const hb_map_t *axes_index_tag_map;	/* presumably axis index -> axis tag; confirm against callers */
  const hb_hashmap_t<hb_tag_t, Triple> *axes_location;	/* pinned axis positions, keyed by axis tag */
  hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *record_cond_idx_map;	/* record index -> condition indices */
  hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;	/* feature index -> substitute Feature */
  bool& insert_catch_all_feature_variation_record;

  // not stored in subset_plan
  hb_set_t *feature_indices;	/* scratch: feature indices seen during collection */
  bool apply;
  bool variation_applied;
  bool universal;
  unsigned cur_record_idx;	/* index of the FeatureVariationRecord being visited */
  hb_hashmap_t<hb::shared_ptr<hb_map_t>, unsigned> *conditionset_map;
};
77 | |
78 | struct hb_prune_langsys_context_t |
79 | { |
80 | hb_prune_langsys_context_t (const void *table_, |
81 | hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map_, |
82 | const hb_map_t *duplicate_feature_map_, |
83 | hb_set_t *new_collected_feature_indexes_) |
84 | :table (table_), |
85 | script_langsys_map (script_langsys_map_), |
86 | duplicate_feature_map (duplicate_feature_map_), |
87 | new_feature_indexes (new_collected_feature_indexes_), |
88 | script_count (0),langsys_feature_count (0) {} |
89 | |
90 | bool visitScript () |
91 | { return script_count++ < HB_MAX_SCRIPTS; } |
92 | |
93 | bool visitLangsys (unsigned feature_count) |
94 | { |
95 | langsys_feature_count += feature_count; |
96 | return langsys_feature_count < HB_MAX_LANGSYS_FEATURE_COUNT; |
97 | } |
98 | |
99 | public: |
100 | const void *table; |
101 | hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map; |
102 | const hb_map_t *duplicate_feature_map; |
103 | hb_set_t *new_feature_indexes; |
104 | |
105 | private: |
106 | unsigned script_count; |
107 | unsigned langsys_feature_count; |
108 | }; |
109 | |
110 | struct hb_subset_layout_context_t : |
111 | hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET> |
112 | { |
113 | const char *get_name () { return "SUBSET_LAYOUT" ; } |
114 | static return_t default_return_value () { return hb_empty_t (); } |
115 | |
116 | bool visitScript () |
117 | { |
118 | return script_count++ < HB_MAX_SCRIPTS; |
119 | } |
120 | |
121 | bool visitLangSys () |
122 | { |
123 | return langsys_count++ < HB_MAX_LANGSYS; |
124 | } |
125 | |
126 | bool visitFeatureIndex (int count) |
127 | { |
128 | feature_index_count += count; |
129 | return feature_index_count < HB_MAX_FEATURE_INDICES; |
130 | } |
131 | |
132 | bool visitLookupIndex() |
133 | { |
134 | lookup_index_count++; |
135 | return lookup_index_count < HB_MAX_LOOKUP_VISIT_COUNT; |
136 | } |
137 | |
138 | hb_subset_context_t *subset_context; |
139 | const hb_tag_t table_tag; |
140 | const hb_map_t *lookup_index_map; |
141 | const hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map; |
142 | const hb_map_t *feature_index_map; |
143 | const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map; |
144 | hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map; |
145 | |
146 | unsigned cur_script_index; |
147 | unsigned cur_feature_var_record_idx; |
148 | |
149 | hb_subset_layout_context_t (hb_subset_context_t *c_, |
150 | hb_tag_t tag_) : |
151 | subset_context (c_), |
152 | table_tag (tag_), |
153 | cur_script_index (0xFFFFu), |
154 | cur_feature_var_record_idx (0u), |
155 | script_count (0), |
156 | langsys_count (0), |
157 | feature_index_count (0), |
158 | lookup_index_count (0) |
159 | { |
160 | if (tag_ == HB_OT_TAG_GSUB) |
161 | { |
162 | lookup_index_map = &c_->plan->gsub_lookups; |
163 | script_langsys_map = &c_->plan->gsub_langsys; |
164 | feature_index_map = &c_->plan->gsub_features; |
165 | feature_substitutes_map = &c_->plan->gsub_feature_substitutes_map; |
166 | feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gsub_feature_record_cond_idx_map; |
167 | } |
168 | else |
169 | { |
170 | lookup_index_map = &c_->plan->gpos_lookups; |
171 | script_langsys_map = &c_->plan->gpos_langsys; |
172 | feature_index_map = &c_->plan->gpos_features; |
173 | feature_substitutes_map = &c_->plan->gpos_feature_substitutes_map; |
174 | feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gpos_feature_record_cond_idx_map; |
175 | } |
176 | } |
177 | |
178 | private: |
179 | unsigned script_count; |
180 | unsigned langsys_count; |
181 | unsigned feature_index_count; |
182 | unsigned lookup_index_count; |
183 | }; |
184 | |
struct VariationStore;
/* Dispatch context that walks layout structures and collects the variation
 * indices (and delta information) referenced by the retained data during
 * subsetting.  Each visited object implements collect_variation_indices(). */
struct hb_collect_variation_indices_context_t :
       hb_dispatch_context_t<hb_collect_variation_indices_context_t>
{
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_set_t *layout_variation_indices;	/* OUT: collected variation indices */
  hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map;	/* presumably varidx -> (new varidx, delta); confirm against callers */
  hb_vector_t<int> *normalized_coords;	/* normalized instance coords, if instancing */
  const VariationStore *var_store;
  const hb_set_t *glyph_set;		/* glyphs retained by the subset plan */
  const hb_map_t *gpos_lookups;		/* retained GPOS lookup indices */
  float *store_cache;			/* scratch cache for VariationStore evaluation */

  hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
					  hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *varidx_delta_map_,
					  hb_vector_t<int> *normalized_coords_,
					  const VariationStore *var_store_,
					  const hb_set_t *glyph_set_,
					  const hb_map_t *gpos_lookups_,
					  float *store_cache_) :
					layout_variation_indices (layout_variation_indices_),
					varidx_delta_map (varidx_delta_map_),
					normalized_coords (normalized_coords_),
					var_store (var_store_),
					glyph_set (glyph_set_),
					gpos_lookups (gpos_lookups_),
					store_cache (store_cache_) {}
};
216 | |
217 | template<typename OutputArray> |
218 | struct subset_offset_array_t |
219 | { |
220 | subset_offset_array_t (hb_subset_context_t *subset_context_, |
221 | OutputArray& out_, |
222 | const void *base_) : subset_context (subset_context_), |
223 | out (out_), base (base_) {} |
224 | |
225 | template <typename T> |
226 | bool operator () (T&& offset) |
227 | { |
228 | auto snap = subset_context->serializer->snapshot (); |
229 | auto *o = out.serialize_append (subset_context->serializer); |
230 | if (unlikely (!o)) return false; |
231 | bool ret = o->serialize_subset (subset_context, offset, base); |
232 | if (!ret) |
233 | { |
234 | out.pop (); |
235 | subset_context->serializer->revert (snap); |
236 | } |
237 | return ret; |
238 | } |
239 | |
240 | private: |
241 | hb_subset_context_t *subset_context; |
242 | OutputArray &out; |
243 | const void *base; |
244 | }; |
245 | |
246 | |
247 | template<typename OutputArray, typename Arg> |
248 | struct subset_offset_array_arg_t |
249 | { |
250 | subset_offset_array_arg_t (hb_subset_context_t *subset_context_, |
251 | OutputArray& out_, |
252 | const void *base_, |
253 | Arg &&arg_) : subset_context (subset_context_), out (out_), |
254 | base (base_), arg (arg_) {} |
255 | |
256 | template <typename T> |
257 | bool operator () (T&& offset) |
258 | { |
259 | auto snap = subset_context->serializer->snapshot (); |
260 | auto *o = out.serialize_append (subset_context->serializer); |
261 | if (unlikely (!o)) return false; |
262 | bool ret = o->serialize_subset (subset_context, offset, base, arg); |
263 | if (!ret) |
264 | { |
265 | out.pop (); |
266 | subset_context->serializer->revert (snap); |
267 | } |
268 | return ret; |
269 | } |
270 | |
271 | private: |
272 | hb_subset_context_t *subset_context; |
273 | OutputArray &out; |
274 | const void *base; |
275 | Arg &&arg; |
276 | }; |
277 | |
278 | /* |
279 | * Helper to subset an array of offsets. Subsets the thing pointed to by each offset |
280 | * and discards the offset in the array if the subset operation results in an empty |
281 | * thing. |
282 | */ |
struct
{
  /* Default variant: no extra argument. */
  template<typename OutputArray>
  subset_offset_array_t<OutputArray>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
	       const void *base) const
  { return subset_offset_array_t<OutputArray> (subset_context, out, base); }

  /* Variant with one extra argument passed to serialize_subset */
  template<typename OutputArray, typename Arg>
  subset_offset_array_arg_t<OutputArray, Arg>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
	       const void *base, Arg &&arg) const
  { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
}
HB_FUNCOBJ (subset_offset_array);
299 | |
300 | template<typename OutputArray> |
301 | struct subset_record_array_t |
302 | { |
303 | subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_, |
304 | const void *base_) : subset_layout_context (c_), |
305 | out (out_), base (base_) {} |
306 | |
307 | template <typename T> |
308 | void |
309 | operator () (T&& record) |
310 | { |
311 | auto snap = subset_layout_context->subset_context->serializer->snapshot (); |
312 | bool ret = record.subset (subset_layout_context, base); |
313 | if (!ret) subset_layout_context->subset_context->serializer->revert (snap); |
314 | else out->len++; |
315 | } |
316 | |
317 | private: |
318 | hb_subset_layout_context_t *subset_layout_context; |
319 | OutputArray *out; |
320 | const void *base; |
321 | }; |
322 | |
323 | template<typename OutputArray, typename Arg> |
324 | struct subset_record_array_arg_t |
325 | { |
326 | subset_record_array_arg_t (hb_subset_layout_context_t *c_, OutputArray* out_, |
327 | const void *base_, |
328 | Arg &&arg_) : subset_layout_context (c_), |
329 | out (out_), base (base_), arg (arg_) {} |
330 | |
331 | template <typename T> |
332 | void |
333 | operator () (T&& record) |
334 | { |
335 | auto snap = subset_layout_context->subset_context->serializer->snapshot (); |
336 | bool ret = record.subset (subset_layout_context, base, arg); |
337 | if (!ret) subset_layout_context->subset_context->serializer->revert (snap); |
338 | else out->len++; |
339 | } |
340 | |
341 | private: |
342 | hb_subset_layout_context_t *subset_layout_context; |
343 | OutputArray *out; |
344 | const void *base; |
345 | Arg &&arg; |
346 | }; |
347 | |
348 | /* |
349 | * Helper to subset a RecordList/record array. Subsets each Record in the array and |
350 | * discards the record if the subset operation returns false. |
351 | */ |
struct
{
  /* Default variant: no extra argument. */
  template<typename OutputArray>
  subset_record_array_t<OutputArray>
  operator () (hb_subset_layout_context_t *c, OutputArray* out,
	       const void *base) const
  { return subset_record_array_t<OutputArray> (c, out, base); }

  /* Variant with one extra argument passed to subset */
  template<typename OutputArray, typename Arg>
  subset_record_array_arg_t<OutputArray, Arg>
  operator () (hb_subset_layout_context_t *c, OutputArray* out,
	       const void *base, Arg &&arg) const
  { return subset_record_array_arg_t<OutputArray, Arg> (c, out, base, arg); }
}
HB_FUNCOBJ (subset_record_array);
368 | |
369 | |
370 | template<typename OutputArray> |
371 | struct serialize_math_record_array_t |
372 | { |
373 | serialize_math_record_array_t (hb_serialize_context_t *serialize_context_, |
374 | OutputArray& out_, |
375 | const void *base_) : serialize_context (serialize_context_), |
376 | out (out_), base (base_) {} |
377 | |
378 | template <typename T> |
379 | bool operator () (T&& record) |
380 | { |
381 | if (!serialize_context->copy (record, base)) return false; |
382 | out.len++; |
383 | return true; |
384 | } |
385 | |
386 | private: |
387 | hb_serialize_context_t *serialize_context; |
388 | OutputArray &out; |
389 | const void *base; |
390 | }; |
391 | |
392 | /* |
393 | * Helper to serialize an array of MATH records. |
394 | */ |
struct
{
  /* Builds the copying functor bound to a serializer, output array and base. */
  template<typename OutputArray>
  serialize_math_record_array_t<OutputArray>
  operator () (hb_serialize_context_t *serialize_context, OutputArray& out,
	       const void *base) const
  { return serialize_math_record_array_t<OutputArray> (serialize_context, out, base); }

}
HB_FUNCOBJ (serialize_math_record_array);
405 | |
406 | /* |
407 | * |
408 | * OpenType Layout Common Table Formats |
409 | * |
410 | */ |
411 | |
412 | |
413 | /* |
414 | * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList |
415 | */ |
416 | |
/* Array of 16-bit indices (e.g. lookup indices in a Feature). */
struct IndexArray : Array16Of<Index>
{
  /* Returns true if any index in the array is present in the given map. */
  bool intersects (const hb_map_t *indexes) const
  { return hb_any (*this, indexes); }

  /* Serializes the indices produced by `it`, stopping early if the
   * layout-context's lookup-visit cap is reached. */
  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
		  hb_subset_layout_context_t *l,
		  Iterator it)
  {
    if (!it) return;
    if (unlikely (!c->extend_min ((*this)))) return;

    for (const auto _ : it)
    {
      if (!l->visitLookupIndex()) break;

      /* Copy each index and bump the array length manually. */
      Index i;
      i = _;
      c->copy (i);
      this->len++;
    }
  }

  /* Copies up to *_count indices, starting at start_offset, into _indexes;
   * *_count is clamped (IN/OUT) to the number actually written.  Returns
   * the total number of indices in the array. */
  unsigned int get_indexes (unsigned int start_offset,
			    unsigned int *_count /* IN/OUT */,
			    unsigned int *_indexes /* OUT */) const
  {
    if (_count)
    {
      + this->as_array ().sub_array (start_offset, _count)
      | hb_sink (hb_array (_indexes, *_count))
      ;
    }
    return this->len;
  }

  /* Adds every index in the array to the output set. */
  void add_indexes_to (hb_set_t* output /* OUT */) const
  {
    output->add_array (as_array ());
  }
};
460 | |
461 | |
462 | /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */ |
struct FeatureParamsSize
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);

    /* This subtable has some "history", if you will. Some earlier versions of
     * Adobe tools calculated the offset of the FeatureParams subtable from the
     * beginning of the FeatureList table! Now, that is dealt with in the
     * Feature implementation. But we still need to be able to tell junk from
     * real data. Note: We don't check that the nameID actually exists.
     *
     * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
     *
     * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
     * coming out soon, and that the makeotf program will build a font with a
     * 'size' feature that is correct by the specification.
     *
     * The specification for this feature tag is in the "OpenType Layout Tag
     * Registry". You can see a copy of this at:
     * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
     *
     * Here is one set of rules to determine if the 'size' feature is built
     * correctly, or as by the older versions of MakeOTF. You may be able to do
     * better.
     *
     * Assume that the offset to the size feature is according to specification,
     * and make the following value checks. If it fails, assume the size
     * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
     * If this fails, reject the 'size' feature. The older makeOTF's calculated the
     * offset from the beginning of the FeatureList table, rather than from the
     * beginning of the 'size' Feature table.
     *
     * If "design size" == 0:
     *     fails check
     *
     * Else if ("subfamily identifier" == 0 and
     *     "range start" == 0 and
     *     "range end" == 0 and
     *     "range start" == 0 and
     *     "menu name ID" == 0)
     *     passes check: this is the format used when there is a design size
     *     specified, but there is no recommended size range.
     *
     * Else if ("design size" < "range start" or
     *     "design size" > "range end" or
     *     "range end" <= "range start" or
     *     "menu name ID" < 256 or
     *     "menu name ID" > 32767 or
     *     menu name ID is not a name ID which is actually in the name table)
     *     fails test
     * Else
     *     passes test.
     */

    if (!designSize)
      return_trace (false);
    else if (subfamilyID == 0 &&
	     subfamilyNameID == 0 &&
	     rangeStart == 0 &&
	     rangeEnd == 0)
      return_trace (true);
    else if (designSize < rangeStart ||
	     designSize > rangeEnd ||
	     subfamilyNameID < 256 ||
	     subfamilyNameID > 32767)
      return_trace (false);
    else
      return_trace (true);
  }

  /* Records the subfamily name ID so the 'name' table subsetter retains it. */
  void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
  { nameids_to_retain->add (subfamilyNameID); }

  /* Fixed-size plain data: copied verbatim into the subset output. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16	designSize;	/* Represents the design size in 720/inch
				 * units (decipoints). The design size entry
				 * must be non-zero. When there is a design
				 * size but no recommended size range, the
				 * rest of the array will consist of zeros. */
  HBUINT16	subfamilyID;	/* Has no independent meaning, but serves
				 * as an identifier that associates fonts
				 * in a subfamily. All fonts which share a
				 * Preferred or Font Family name and which
				 * differ only by size range shall have the
				 * same subfamily value, and no fonts which
				 * differ in weight or style shall have the
				 * same subfamily value. If this value is
				 * zero, the remaining fields in the array
				 * will be ignored. */
  NameID	subfamilyNameID;/* If the preceding value is non-zero, this
				 * value must be set in the range 256 - 32767
				 * (inclusive). It records the value of a
				 * field in the name table, which must
				 * contain English-language strings encoded
				 * in Windows Unicode and Macintosh Roman,
				 * and may contain additional strings
				 * localized to other scripts and languages.
				 * Each of these strings is the name an
				 * application should use, in combination
				 * with the family name, to represent the
				 * subfamily in a menu. Applications will
				 * choose the appropriate version based on
				 * their selection criteria. */
  HBUINT16	rangeStart;	/* Small end of the recommended usage range
				 * (exclusive), stored in 720/inch units
				 * (decipoints). */
  HBUINT16	rangeEnd;	/* Large end of the recommended usage range
				 * (inclusive), stored in 720/inch units
				 * (decipoints). */
  public:
  DEFINE_SIZE_STATIC (10);
};
582 | |
583 | /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */ |
struct FeatureParamsStylisticSet
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Right now minorVersion is at zero. Which means, any table supports
     * the uiNameID field. */
    return_trace (c->check_struct (this));
  }

  /* Records the UI-label name ID so the 'name' table subsetter retains it. */
  void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
  { nameids_to_retain->add (uiNameID); }

  /* Fixed-size plain data: copied verbatim into the subset output. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16	version;	/* (set to 0): This corresponds to a “minor”
				 * version number. Additional data may be
				 * added to the end of this Feature Parameters
				 * table in the future. */

  NameID	uiNameID;	/* The 'name' table name ID that specifies a
				 * string (or strings, for multiple languages)
				 * for a user-interface label for this
				 * feature. The values of uiLabelNameId and
				 * sampleTextNameId are expected to be in the
				 * font-specific name ID range (256-32767),
				 * though that is not a requirement in this
				 * Feature Parameters specification. The
				 * user-interface label for the feature can
				 * be provided in multiple languages. An
				 * English string should be included as a
				 * fallback. The string should be kept to a
				 * minimal length to fit comfortably with
				 * different application interfaces. */
  public:
  DEFINE_SIZE_STATIC (4);
};
625 | |
626 | /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */ |
struct FeatureParamsCharacterVariants
{
  /* Copies up to *char_count characters starting at start_offset into chars;
   * *char_count is clamped (IN/OUT) to the number actually written.  Returns
   * the total number of characters in the table. */
  unsigned
  get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
  {
    if (char_count)
    {
      + characters.as_array ().sub_array (start_offset, char_count)
      | hb_sink (hb_array (chars, *char_count))
      ;
    }
    return characters.len;
  }

  unsigned get_size () const
  { return min_size + characters.len * HBUINT24::static_size; }

  /* Records every referenced 'name' table ID so the subsetter retains them. */
  void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
  {
    if (featUILableNameID) nameids_to_retain->add (featUILableNameID);
    if (featUITooltipTextNameID) nameids_to_retain->add (featUITooltipTextNameID);
    if (sampleTextNameID) nameids_to_retain->add (sampleTextNameID);

    /* Skip parameter labels when absent or implausibly large (guards the
     * add_range below against bogus counts). */
    if (!firstParamUILabelNameID || !numNamedParameters || numNamedParameters >= 0x7FFF)
      return;

    unsigned last_name_id = (unsigned) firstParamUILabelNameID + (unsigned) numNamedParameters - 1;
    /* Only add the range if it stays within the font-specific name ID range. */
    if (last_name_id >= 256 && last_name_id <= 32767)
      nameids_to_retain->add_range (firstParamUILabelNameID, last_name_id);
  }

  /* Variable-size but self-contained: copied verbatim into the subset output. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  characters.sanitize (c));
  }

  HBUINT16	format;			/* Format number is set to 0. */
  NameID	featUILableNameID;	/* The ‘name’ table name ID that
					 * specifies a string (or strings,
					 * for multiple languages) for a
					 * user-interface label for this
					 * feature. (May be NULL.) */
  NameID	featUITooltipTextNameID;/* The ‘name’ table name ID that
					 * specifies a string (or strings,
					 * for multiple languages) that an
					 * application can use for tooltip
					 * text for this feature. (May be
					 * nullptr.) */
  NameID	sampleTextNameID;	/* The ‘name’ table name ID that
					 * specifies sample text that
					 * illustrates the effect of this
					 * feature. (May be NULL.) */
  HBUINT16	numNamedParameters;	/* Number of named parameters. (May
					 * be zero.) */
  NameID	firstParamUILabelNameID;/* The first ‘name’ table name ID
					 * used to specify strings for
					 * user-interface labels for the
					 * feature parameters. (Must be zero
					 * if numParameters is zero.) */
  Array16Of<HBUINT24>
		characters;		/* Array of the Unicode Scalar Value
					 * of the characters for which this
					 * feature provides glyph variants.
					 * (May be zero.) */
  public:
  DEFINE_SIZE_ARRAY (14, characters);
};
702 | |
/* Union of the per-feature parameter tables.  Which member is active is
 * determined by the feature tag ('size', ssXX, cvXX), not by the data
 * itself, so every accessor takes the tag. */
struct FeatureParams
{
  bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
  {
#ifdef HB_NO_LAYOUT_FEATURE_PARAMS
    return true;
#endif
    TRACE_SANITIZE (this);
    if (tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.sanitize (c));
    /* Unknown tag: nothing to check. */
    return_trace (true);
  }

  /* Forwards to the active member's collect_name_ids, selected by tag. */
  void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const
  {
#ifdef HB_NO_LAYOUT_FEATURE_PARAMS
    return;
#endif
    if (tag == HB_TAG ('s','i','z','e'))
      return (u.size.collect_name_ids (nameids_to_retain));
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return (u.stylisticSet.collect_name_ids (nameids_to_retain));
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return (u.characterVariants.collect_name_ids (nameids_to_retain));
  }

  /* Subsets the active member, selected by tag; false for unknown tags. */
  bool subset (hb_subset_context_t *c, const Tag* tag) const
  {
    TRACE_SUBSET (this);
    if (!tag) return_trace (false);
    if (*tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.subset (c));
    return_trace (false);
  }

#ifndef HB_NO_LAYOUT_FEATURE_PARAMS
  /* Typed accessors: return the member if the tag matches, else Null. */
  const FeatureParamsSize& get_size_params (hb_tag_t tag) const
  {
    if (tag == HB_TAG ('s','i','z','e'))
      return u.size;
    return Null (FeatureParamsSize);
  }
  const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return u.stylisticSet;
    return Null (FeatureParamsStylisticSet);
  }
  const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return u.characterVariants;
    return Null (FeatureParamsCharacterVariants);
  }
#endif

  private:
  union {
  FeatureParamsSize			size;
  FeatureParamsStylisticSet		stylisticSet;
  FeatureParamsCharacterVariants	characterVariants;
  } u;
  public:
  DEFINE_SIZE_MIN (0);
};
776 | |
/* Closure passed down from Record sanitization: the record's tag plus the
 * base of the containing list (used by Feature::sanitize for the legacy
 * 'size' FeatureParams offset fixup). */
struct Record_sanitize_closure_t {
  hb_tag_t tag;
  const void *list_base;
};
781 | |
782 | struct Feature |
783 | { |
784 | unsigned int get_lookup_count () const |
785 | { return lookupIndex.len; } |
786 | hb_tag_t get_lookup_index (unsigned int i) const |
787 | { return lookupIndex[i]; } |
788 | unsigned int get_lookup_indexes (unsigned int start_index, |
789 | unsigned int *lookup_count /* IN/OUT */, |
790 | unsigned int *lookup_tags /* OUT */) const |
791 | { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); } |
792 | void add_lookup_indexes_to (hb_set_t *lookup_indexes) const |
793 | { lookupIndex.add_indexes_to (lookup_indexes); } |
794 | |
795 | const FeatureParams &get_feature_params () const |
796 | { return this+featureParams; } |
797 | |
798 | bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const |
799 | { return lookupIndex.intersects (lookup_indexes); } |
800 | |
801 | void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const |
802 | { |
803 | if (featureParams) |
804 | get_feature_params ().collect_name_ids (tag, nameids_to_retain); |
805 | } |
806 | |
807 | bool subset (hb_subset_context_t *c, |
808 | hb_subset_layout_context_t *l, |
809 | const Tag *tag = nullptr) const |
810 | { |
811 | TRACE_SUBSET (this); |
812 | auto *out = c->serializer->start_embed (*this); |
813 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
814 | |
815 | out->featureParams.serialize_subset (c, featureParams, this, tag); |
816 | |
817 | auto it = |
818 | + hb_iter (lookupIndex) |
819 | | hb_filter (l->lookup_index_map) |
820 | | hb_map (l->lookup_index_map) |
821 | ; |
822 | |
823 | out->lookupIndex.serialize (c->serializer, l, it); |
824 | // The decision to keep or drop this feature is already made before we get here |
825 | // so always retain it. |
826 | return_trace (true); |
827 | } |
828 | |
  /* Sanitizes the Feature.  Besides the usual struct/array checks, this
   * implements a repair path for a historical Adobe-tools bug in the
   * FeatureParams offset (see comment below). */
  bool sanitize (hb_sanitize_context_t *c,
	       const Record_sanitize_closure_t *closure = nullptr) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
      return_trace (false);

    /* Some earlier versions of Adobe tools calculated the offset of the
     * FeatureParams subtable from the beginning of the FeatureList table!
     *
     * If sanitizing "failed" for the FeatureParams subtable, try it with the
     * alternative location. We would know sanitize "failed" if old value
     * of the offset was non-zero, but it's zeroed now.
     *
     * Only do this for the 'size' feature, since at the time of the faulty
     * Adobe tools, only the 'size' feature had FeatureParams defined.
     */

    if (likely (featureParams.is_null ()))
      return_trace (true);

    /* Remember the original offset: sanitize() below zeroes it on failure. */
    unsigned int orig_offset = featureParams;
    if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
      return_trace (false);

    /* featureParams == 0 here means sanitize zeroed a non-null offset, i.e.
     * it "failed"; retry interpreting the offset as FeatureList-relative. */
    if (featureParams == 0 && closure &&
	closure->tag == HB_TAG ('s','i','z','e') &&
	closure->list_base && closure->list_base < this)
    {
      /* Rebase the offset from the FeatureList to this Feature table. */
      unsigned int new_offset_int = orig_offset -
				    (((char *) this) - ((char *) closure->list_base));

      Offset16To<FeatureParams> new_offset;
      /* Check that it would not overflow. */
      new_offset = new_offset_int;
      if (new_offset == new_offset_int &&
	  c->try_set (&featureParams, new_offset_int) &&
	  !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
	return_trace (false);
    }

    return_trace (true);
  }
872 | |
  Offset16To<FeatureParams>
		featureParams;	/* Offset to Feature Parameters table (if one
				 * has been defined for the feature), relative
				 * to the beginning of the Feature Table; = Null
				 * if not required */
  IndexArray	 lookupIndex;	/* Array of LookupList indices */
  public:
  /* Min size 4 = 2 (featureParams offset) + 2 (lookupIndex count). */
  DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
882 | |
/* A tag-keyed record (ScriptRecord / LangSysRecord / FeatureRecord): a 4-byte
 * Tag plus a 16-bit offset to the payload, measured from the enclosing list. */
template <typename Type>
struct Record
{
  /* Tag comparison, used by the sorted-array binary search. */
  int cmp (hb_tag_t a) const { return tag.cmp (a); }

  /* Subsets this record.  If f_sub is non-null it points at a substitute
   * Feature (see RecordListOfFeature) to serialize in place of the object
   * this record references. */
  bool subset (hb_subset_layout_context_t *c, const void *base, const void *f_sub = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Common path: subset the referenced object through our offset. */
    if (!f_sub)
      return_trace (out->offset.serialize_subset (c->subset_context, offset, base, c, &tag));

    /* Substitute path: serialize the replacement Feature as a fresh object
     * and link it from the copied record's offset.  push/pop must bracket
     * the serialization exactly. */
    const Feature& f = *reinterpret_cast<const Feature *> (f_sub);
    auto *s = c->subset_context->serializer;
    s->push ();

    out->offset = 0;
    bool ret = f.subset (c->subset_context, c, &tag);
    if (ret)
      s->add_link (out->offset, s->pop_pack ());
    else
      s->pop_discard ();

    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    /* Pass tag and list base down so Feature::sanitize can apply its
     * 'size'-feature offset workaround. */
    const Record_sanitize_closure_t closure = {tag, base};
    return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
  }

  Tag		tag;		/* 4-byte Tag identifier */
  Offset16To<Type>
		offset;		/* Offset from beginning of object holding
				 * the Record */
  public:
  DEFINE_SIZE_STATIC (6);
};
925 | |
926 | template <typename Type> |
927 | struct RecordArrayOf : SortedArray16Of<Record<Type>> |
928 | { |
929 | const Offset16To<Type>& get_offset (unsigned int i) const |
930 | { return (*this)[i].offset; } |
931 | Offset16To<Type>& get_offset (unsigned int i) |
932 | { return (*this)[i].offset; } |
933 | const Tag& get_tag (unsigned int i) const |
934 | { return (*this)[i].tag; } |
935 | unsigned int get_tags (unsigned int start_offset, |
936 | unsigned int *record_count /* IN/OUT */, |
937 | hb_tag_t *record_tags /* OUT */) const |
938 | { |
939 | if (record_count) |
940 | { |
941 | + this->as_array ().sub_array (start_offset, record_count) |
942 | | hb_map (&Record<Type>::tag) |
943 | | hb_sink (hb_array (record_tags, *record_count)) |
944 | ; |
945 | } |
946 | return this->len; |
947 | } |
948 | bool find_index (hb_tag_t tag, unsigned int *index) const |
949 | { |
950 | return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX); |
951 | } |
952 | }; |
953 | |
954 | template <typename Type> |
955 | struct RecordListOf : RecordArrayOf<Type> |
956 | { |
957 | const Type& operator [] (unsigned int i) const |
958 | { return this+this->get_offset (i); } |
959 | |
960 | bool subset (hb_subset_context_t *c, |
961 | hb_subset_layout_context_t *l) const |
962 | { |
963 | TRACE_SUBSET (this); |
964 | auto *out = c->serializer->start_embed (*this); |
965 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
966 | |
967 | + this->iter () |
968 | | hb_apply (subset_record_array (l, out, this)) |
969 | ; |
970 | return_trace (true); |
971 | } |
972 | |
973 | bool sanitize (hb_sanitize_context_t *c) const |
974 | { |
975 | TRACE_SANITIZE (this); |
976 | return_trace (RecordArrayOf<Type>::sanitize (c, this)); |
977 | } |
978 | }; |
979 | |
980 | struct RecordListOfFeature : RecordListOf<Feature> |
981 | { |
982 | bool subset (hb_subset_context_t *c, |
983 | hb_subset_layout_context_t *l) const |
984 | { |
985 | TRACE_SUBSET (this); |
986 | auto *out = c->serializer->start_embed (*this); |
987 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
988 | |
989 | + hb_enumerate (*this) |
990 | | hb_filter (l->feature_index_map, hb_first) |
991 | | hb_apply ([l, out, this] (const hb_pair_t<unsigned, const Record<Feature>&>& _) |
992 | { |
993 | const Feature *f_sub = nullptr; |
994 | const Feature **f = nullptr; |
995 | if (l->feature_substitutes_map->has (_.first, &f)) |
996 | f_sub = *f; |
997 | |
998 | subset_record_array (l, out, this, f_sub) (_.second); |
999 | }) |
1000 | ; |
1001 | |
1002 | return_trace (true); |
1003 | } |
1004 | }; |
1005 | |
1006 | typedef RecordListOf<Feature> FeatureList; |
1007 | |
1008 | |
/* Language-system table: an optional required feature plus an array of
 * indices into the FeatureList. */
struct LangSys
{
  unsigned int get_feature_count () const
  { return featureIndex.len; }
  /* NOTE(review): return type is hb_tag_t though the value is a feature
   * index — matches existing callers; confirm before changing. */
  hb_tag_t get_feature_index (unsigned int i) const
  { return featureIndex[i]; }
  /* Copies a slice of feature indices out; returns the total count. */
  unsigned int get_feature_indexes (unsigned int start_offset,
				    unsigned int *feature_count /* IN/OUT */,
				    unsigned int *feature_indexes /* OUT */) const
  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
  void add_feature_indexes_to (hb_set_t *feature_indexes) const
  { featureIndex.add_indexes_to (feature_indexes); }

  /* 0xFFFF is the spec sentinel for "no required feature". */
  bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
  unsigned int get_required_feature_index () const
  {
    if (reqFeatureIndex == 0xFFFFu)
      return Index::NOT_FOUND_INDEX;
    return reqFeatureIndex;
  }

  LangSys* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  /* Compares two LangSys for equal feature content after remapping through
   * feature_index_map; used to detect langsys redundant with the default. */
  bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
  {
    if (reqFeatureIndex != o.reqFeatureIndex)
      return false;

    /* Only features surviving the remap participate, in remapped form. */
    auto iter =
    + hb_iter (featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    auto o_iter =
    + hb_iter (o.featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    for (; iter && o_iter; iter++, o_iter++)
    {
      unsigned a = *iter;
      unsigned b = *o_iter;
      if (a != b) return false;
    }

    /* One sequence longer than the other => different feature sets. */
    if (iter || o_iter) return false;

    return true;
  }

  /* Accumulates this langsys's (non-duplicate) feature indices into the
   * pruning context. */
  void collect_features (hb_prune_langsys_context_t *c) const
  {
    if (!has_required_feature () && !get_feature_count ()) return;
    if (has_required_feature () &&
        c->duplicate_feature_map->has (reqFeatureIndex))
      c->new_feature_indexes->add (get_required_feature_index ());

    + hb_iter (featureIndex)
    | hb_filter (c->duplicate_feature_map)
    | hb_sink (c->new_feature_indexes)
    ;
  }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l,
	       const Tag *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    /* Remap the required feature; drop to the 0xFFFF sentinel if it was
     * pruned from the plan. */
    const uint32_t *v;
    out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex, &v) ? *v : 0xFFFFu;

    /* Enforce the layout-context sanity limit on visited feature indices. */
    if (!l->visitFeatureIndex (featureIndex.len))
      return_trace (false);

    auto it =
    + hb_iter (featureIndex)
    | hb_filter (l->feature_index_map)
    | hb_map (l->feature_index_map)
    ;

    /* Result reports whether any feature index survived. */
    bool ret = bool (it);
    out->featureIndex.serialize (c->serializer, l, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && featureIndex.sanitize (c));
  }

  Offset16	lookupOrderZ;	/* = Null (reserved for an offset to a
				 * reordering table) */
  HBUINT16	reqFeatureIndex;/* Index of a feature required for this
				 * language system--if no required features
				 * = 0xFFFFu */
  IndexArray	featureIndex;	/* Array of indices into the FeatureList */
  public:
  DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
};
1119 | DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys); |
1120 | |
/* Script table: an optional default LangSys plus an array of tagged
 * LangSys records. */
struct Script
{
  unsigned int get_lang_sys_count () const
  { return langSys.len; }
  const Tag& get_lang_sys_tag (unsigned int i) const
  { return langSys.get_tag (i); }
  unsigned int get_lang_sys_tags (unsigned int start_offset,
				  unsigned int *lang_sys_count /* IN/OUT */,
				  hb_tag_t *lang_sys_tags /* OUT */) const
  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
  /* NOT_FOUND_INDEX selects the default LangSys. */
  const LangSys& get_lang_sys (unsigned int i) const
  {
    if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
    return this+langSys[i].offset;
  }
  bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
  { return langSys.find_index (tag, index); }

  bool has_default_lang_sys () const { return defaultLangSys != 0; }
  const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }

  /* Collects the features of non-redundant langsys into the pruning
   * context, and records (per script) which langsys to keep.  A langsys
   * whose feature content equals the default langsys is skipped. */
  void prune_langsys (hb_prune_langsys_context_t *c,
                      unsigned script_index) const
  {
    if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
    if (!c->visitScript ()) return;  /* sanity limit on scripts visited */

    /* Lazily create the per-script set of retained langsys indices. */
    if (!c->script_langsys_map->has (script_index))
    {
      if (unlikely (!c->script_langsys_map->set (script_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
	return;
    }

    if (has_default_lang_sys ())
    {
      //only collect features from non-redundant langsys
      const LangSys& d = get_default_lang_sys ();
      if (c->visitLangsys (d.get_feature_count ())) {
        d.collect_features (c);
      }

      for (auto _ : + hb_enumerate (langSys))
      {
        const LangSys& l = this+_.second.offset;
        if (!c->visitLangsys (l.get_feature_count ())) continue;
        if (l.compare (d, c->duplicate_feature_map)) continue;

        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.first);
      }
    }
    else
    {
      /* No default langsys: every langsys is collected and retained. */
      for (auto _ : + hb_enumerate (langSys))
      {
        const LangSys& l = this+_.second.offset;
        if (!c->visitLangsys (l.get_feature_count ())) continue;
        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.first);
      }
    }
  }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l,
	       const Tag *tag) const
  {
    TRACE_SUBSET (this);
    if (!l->visitScript ()) return_trace (false);
    if (tag && !c->plan->layout_scripts.has (*tag))
      return false;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    bool defaultLang = false;
    if (has_default_lang_sys ())
    {
      /* Serialize the default langsys as its own object; discard it if the
       * subset came out empty — except for 'DFLT', which keeps it. */
      c->serializer->push ();
      const LangSys& ls = this+defaultLangSys;
      bool ret = ls.subset (c, l);
      if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
      {
	c->serializer->pop_discard ();
	out->defaultLangSys = 0;
      }
      else
      {
	c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
	defaultLang = true;
      }
    }

    /* Only langsys retained for this script during planning are kept. */
    const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
    if (active_langsys)
    {
      + hb_enumerate (langSys)
      | hb_filter (active_langsys, hb_first)
      | hb_map (hb_second)
      | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
      | hb_apply (subset_record_array (l, &(out->langSys), this))
      ;
    }

    /* GSUB scripts are kept even when empty. */
    return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
  }

  protected:
  Offset16To<LangSys>
		defaultLangSys;	/* Offset to DefaultLangSys table--from
				 * beginning of Script table--may be Null */
  RecordArrayOf<LangSys>
		langSys;	/* Array of LangSysRecords--listed
				 * alphabetically by LangSysTag */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, langSys);
};
1245 | |
1246 | struct RecordListOfScript : RecordListOf<Script> |
1247 | { |
1248 | bool subset (hb_subset_context_t *c, |
1249 | hb_subset_layout_context_t *l) const |
1250 | { |
1251 | TRACE_SUBSET (this); |
1252 | auto *out = c->serializer->start_embed (*this); |
1253 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
1254 | |
1255 | for (auto _ : + hb_enumerate (*this)) |
1256 | { |
1257 | auto snap = c->serializer->snapshot (); |
1258 | l->cur_script_index = _.first; |
1259 | bool ret = _.second.subset (l, this); |
1260 | if (!ret) c->serializer->revert (snap); |
1261 | else out->len++; |
1262 | } |
1263 | |
1264 | return_trace (true); |
1265 | } |
1266 | }; |
1267 | |
1268 | typedef RecordListOfScript ScriptList; |
1269 | |
1270 | |
1271 | |
1272 | struct LookupFlag : HBUINT16 |
1273 | { |
1274 | enum Flags { |
1275 | RightToLeft = 0x0001u, |
1276 | IgnoreBaseGlyphs = 0x0002u, |
1277 | IgnoreLigatures = 0x0004u, |
1278 | = 0x0008u, |
1279 | IgnoreFlags = 0x000Eu, |
1280 | UseMarkFilteringSet = 0x0010u, |
1281 | Reserved = 0x00E0u, |
1282 | MarkAttachmentType = 0xFF00u |
1283 | }; |
1284 | public: |
1285 | DEFINE_SIZE_STATIC (2); |
1286 | }; |
1287 | |
1288 | } /* namespace OT */ |
1289 | /* This has to be outside the namespace. */ |
1290 | HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags); |
1291 | namespace OT { |
1292 | |
/* Lookup table: type, flags, an array of subtable offsets, and an optional
 * trailing markFilteringSet field (present iff UseMarkFilteringSet is set). */
struct Lookup
{
  unsigned int get_subtable_count () const { return subTable.len; }

  /* The subtable offsets are stored as raw Offset16; these reinterpret
   * them as offsets to the GSUB/GPOS-specific subtable type. */
  template <typename TSubTable>
  const Array16OfOffset16To<TSubTable>& get_subtables () const
  { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
  template <typename TSubTable>
  Array16OfOffset16To<TSubTable>& get_subtables ()
  { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }

  template <typename TSubTable>
  const TSubTable& get_subtable (unsigned int i) const
  { return this+get_subtables<TSubTable> ()[i]; }
  template <typename TSubTable>
  TSubTable& get_subtable (unsigned int i)
  { return this+get_subtables<TSubTable> ()[i]; }

  /* Total byte size, including the optional trailing markFilteringSet. */
  unsigned int get_size () const
  {
    const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
      return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
    return (const char *) &markFilteringSet - (const char *) this;
  }

  unsigned int get_type () const { return lookupType; }

  /* lookup_props is a 32-bit integer where the lower 16-bit is LookupFlag and
   * higher 16-bit is mark-filtering-set if the lookup uses one.
   * Not to be confused with glyph_props which is very similar. */
  uint32_t get_props () const
  {
    unsigned int flag = lookupFlag;
    if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      flag += (markFilteringSet << 16);
    }
    return flag;
  }

  /* Dispatches the context over each subtable in order, stopping early if
   * the context says so. */
  template <typename TSubTable, typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    unsigned int lookup_type = get_type ();
    TRACE_DISPATCH (this, lookup_type);
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, std::forward<Ts> (ds)...);
      if (c->stop_sublookup_iteration (r))
	return_trace (r);
    }
    return_trace (c->default_return_value ());
  }

  /* Serializes the fixed header and offset array; subtable contents are
   * filled in by the caller.  lookup_props packs flag (low 16) and
   * mark-filtering-set (high 16). */
  bool serialize (hb_serialize_context_t *c,
		  unsigned int lookup_type,
		  uint32_t lookup_props,
		  unsigned int num_subtables)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    lookupType = lookup_type;
    lookupFlag = lookup_props & 0xFFFFu;
    if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      /* Grow the struct to make room for the optional trailing field. */
      if (unlikely (!c->extend (this))) return_trace (false);
      HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      markFilteringSet = lookup_props >> 16;
    }
    return_trace (true);
  }

  template <typename TSubTable>
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->lookupType = lookupType;
    out->lookupFlag = lookupFlag;

    /* Keep only subtables that intersect the retained glyph set. */
    const hb_set_t *glyphset = c->plan->glyphset_gsub ();
    unsigned int lookup_type = get_type ();
    + hb_iter (get_subtables <TSubTable> ())
    | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
    | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
    ;

    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      /* Copy the trailing markFilteringSet field verbatim. */
      if (unlikely (!c->serializer->extend (out))) return_trace (false);
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
      outMarkFilteringSet = markFilteringSet;
    }

    // Always keep the lookup even if it's empty. The rest of layout subsetting depends on lookup
    // indices being consistent with those computed during planning. So if an empty lookup is
    // discarded during the subset phase it will invalidate all subsequent lookup indices.
    // Generally we shouldn't end up with an empty lookup as we pre-prune them during the planning
    // phase, but it can happen in rare cases such as when during closure subtable is considered
    // degenerate (see: https://github.com/harfbuzz/harfbuzz/issues/3853)
    return_trace (true);
  }

  template <typename TSubTable>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);

    unsigned subtables = get_subtable_count ();
    if (unlikely (!c->visit_subtables (subtables))) return_trace (false);

    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      if (!markFilteringSet.sanitize (c)) return_trace (false);
    }

    if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
      return_trace (false);

    if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
    {
      /* The spec says all subtables of an Extension lookup should
       * have the same type, which shall not be the Extension type
       * itself (but we already checked for that).
       * This is specially important if one has a reverse type!
       *
       * We only do this if sanitizer edit_count is zero. Otherwise,
       * some of the subtables might have become insane after they
       * were sanity-checked by the edits of subsequent subtables.
       * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
       */
      unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
      for (unsigned int i = 1; i < subtables; i++)
        if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
          return_trace (false);
    }
    return_trace (true);
  }

  protected:
  HBUINT16	lookupType;		/* Different enumerations for GSUB and GPOS */
  HBUINT16	lookupFlag;		/* Lookup qualifiers */
  Array16Of<Offset16>
		subTable;		/* Array of SubTables */
  /*HBUINT16	markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
					 * structure. This field is only present if bit
					 * UseMarkFilteringSet of lookup flags is set. */
  public:
  DEFINE_SIZE_ARRAY (6, subTable);
};
1450 | |
1451 | template <typename Types> |
1452 | using LookupList = List16OfOffsetTo<Lookup, typename Types::HBUINT>; |
1453 | |
1454 | template <typename TLookup, typename OffsetType> |
1455 | struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType> |
1456 | { |
1457 | bool subset (hb_subset_context_t *c, |
1458 | hb_subset_layout_context_t *l) const |
1459 | { |
1460 | TRACE_SUBSET (this); |
1461 | auto *out = c->serializer->start_embed (this); |
1462 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
1463 | |
1464 | + hb_enumerate (*this) |
1465 | | hb_filter (l->lookup_index_map, hb_first) |
1466 | | hb_map (hb_second) |
1467 | | hb_apply (subset_offset_array (c, *out, this)) |
1468 | ; |
1469 | return_trace (true); |
1470 | } |
1471 | |
1472 | bool sanitize (hb_sanitize_context_t *c) const |
1473 | { |
1474 | TRACE_SANITIZE (this); |
1475 | return_trace (List16OfOffset16To<TLookup>::sanitize (c, this)); |
1476 | } |
1477 | }; |
1478 | |
1479 | |
1480 | /* |
1481 | * Coverage Table |
1482 | */ |
1483 | |
1484 | |
1485 | static bool ClassDef_remap_and_serialize (hb_serialize_context_t *c, |
1486 | const hb_set_t &klasses, |
1487 | bool use_class_zero, |
1488 | hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */ |
1489 | hb_map_t *klass_map /*IN/OUT*/) |
1490 | { |
1491 | if (!klass_map) |
1492 | return ClassDef_serialize (c, glyph_and_klass.iter ()); |
1493 | |
1494 | /* any glyph not assigned a class value falls into Class zero (0), |
1495 | * if any glyph assigned to class 0, remapping must start with 0->0*/ |
1496 | if (!use_class_zero) |
1497 | klass_map->set (0, 0); |
1498 | |
1499 | unsigned idx = klass_map->has (0) ? 1 : 0; |
1500 | for (const unsigned k: klasses) |
1501 | { |
1502 | if (klass_map->has (k)) continue; |
1503 | klass_map->set (k, idx); |
1504 | idx++; |
1505 | } |
1506 | |
1507 | |
1508 | for (unsigned i = 0; i < glyph_and_klass.length; i++) |
1509 | { |
1510 | hb_codepoint_t klass = glyph_and_klass[i].second; |
1511 | glyph_and_klass[i].second = klass_map->get (klass); |
1512 | } |
1513 | |
1514 | c->propagate_error (glyph_and_klass, klasses); |
1515 | return ClassDef_serialize (c, glyph_and_klass.iter ()); |
1516 | } |
1517 | |
1518 | /* |
1519 | * Class Definition Table |
1520 | */ |
1521 | |
1522 | template <typename Types> |
1523 | struct ClassDefFormat1_3 |
1524 | { |
1525 | friend struct ClassDef; |
1526 | |
1527 | private: |
1528 | unsigned int get_class (hb_codepoint_t glyph_id) const |
1529 | { |
1530 | return classValue[(unsigned int) (glyph_id - startGlyph)]; |
1531 | } |
1532 | |
1533 | unsigned get_population () const |
1534 | { |
1535 | return classValue.len; |
1536 | } |
1537 | |
1538 | template<typename Iterator, |
1539 | hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))> |
1540 | bool serialize (hb_serialize_context_t *c, |
1541 | Iterator it) |
1542 | { |
1543 | TRACE_SERIALIZE (this); |
1544 | if (unlikely (!c->extend_min (this))) return_trace (false); |
1545 | |
1546 | if (unlikely (!it)) |
1547 | { |
1548 | classFormat = 1; |
1549 | startGlyph = 0; |
1550 | classValue.len = 0; |
1551 | return_trace (true); |
1552 | } |
1553 | |
1554 | hb_codepoint_t glyph_min = (*it).first; |
1555 | hb_codepoint_t glyph_max = + it |
1556 | | hb_map (hb_first) |
1557 | | hb_reduce (hb_max, 0u); |
1558 | unsigned glyph_count = glyph_max - glyph_min + 1; |
1559 | |
1560 | startGlyph = glyph_min; |
1561 | if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false); |
1562 | for (const hb_pair_t<hb_codepoint_t, uint32_t> gid_klass_pair : + it) |
1563 | { |
1564 | unsigned idx = gid_klass_pair.first - glyph_min; |
1565 | classValue[idx] = gid_klass_pair.second; |
1566 | } |
1567 | return_trace (true); |
1568 | } |
1569 | |
1570 | bool subset (hb_subset_context_t *c, |
1571 | hb_map_t *klass_map = nullptr /*OUT*/, |
1572 | bool keep_empty_table = true, |
1573 | bool use_class_zero = true, |
1574 | const Coverage* glyph_filter = nullptr) const |
1575 | { |
1576 | TRACE_SUBSET (this); |
1577 | const hb_map_t &glyph_map = c->plan->glyph_map_gsub; |
1578 | |
1579 | hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass; |
1580 | hb_set_t orig_klasses; |
1581 | |
1582 | hb_codepoint_t start = startGlyph; |
1583 | hb_codepoint_t end = start + classValue.len; |
1584 | |
1585 | for (const hb_codepoint_t gid : + hb_range (start, end)) |
1586 | { |
1587 | hb_codepoint_t new_gid = glyph_map[gid]; |
1588 | if (new_gid == HB_MAP_VALUE_INVALID) continue; |
1589 | if (glyph_filter && !glyph_filter->has(gid)) continue; |
1590 | |
1591 | unsigned klass = classValue[gid - start]; |
1592 | if (!klass) continue; |
1593 | |
1594 | glyph_and_klass.push (hb_pair (new_gid, klass)); |
1595 | orig_klasses.add (klass); |
1596 | } |
1597 | |
1598 | if (use_class_zero) |
1599 | { |
1600 | unsigned glyph_count = glyph_filter |
1601 | ? hb_len (hb_iter (glyph_map.keys()) | hb_filter (glyph_filter)) |
1602 | : glyph_map.get_population (); |
1603 | use_class_zero = glyph_count <= glyph_and_klass.length; |
1604 | } |
1605 | if (!ClassDef_remap_and_serialize (c->serializer, |
1606 | orig_klasses, |
1607 | use_class_zero, |
1608 | glyph_and_klass, |
1609 | klass_map)) |
1610 | return_trace (false); |
1611 | return_trace (keep_empty_table || (bool) glyph_and_klass); |
1612 | } |
1613 | |
1614 | bool sanitize (hb_sanitize_context_t *c) const |
1615 | { |
1616 | TRACE_SANITIZE (this); |
1617 | return_trace (c->check_struct (this) && classValue.sanitize (c)); |
1618 | } |
1619 | |
1620 | unsigned cost () const { return 1; } |
1621 | |
1622 | template <typename set_t> |
1623 | bool collect_coverage (set_t *glyphs) const |
1624 | { |
1625 | unsigned int start = 0; |
1626 | unsigned int count = classValue.len; |
1627 | for (unsigned int i = 0; i < count; i++) |
1628 | { |
1629 | if (classValue[i]) |
1630 | continue; |
1631 | |
1632 | if (start != i) |
1633 | if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i))) |
1634 | return false; |
1635 | |
1636 | start = i + 1; |
1637 | } |
1638 | if (start != count) |
1639 | if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count))) |
1640 | return false; |
1641 | |
1642 | return true; |
1643 | } |
1644 | |
1645 | template <typename set_t> |
1646 | bool collect_class (set_t *glyphs, unsigned klass) const |
1647 | { |
1648 | unsigned int count = classValue.len; |
1649 | for (unsigned int i = 0; i < count; i++) |
1650 | if (classValue[i] == klass) glyphs->add (startGlyph + i); |
1651 | return true; |
1652 | } |
1653 | |
1654 | bool intersects (const hb_set_t *glyphs) const |
1655 | { |
1656 | hb_codepoint_t start = startGlyph; |
1657 | hb_codepoint_t end = startGlyph + classValue.len; |
1658 | for (hb_codepoint_t iter = startGlyph - 1; |
1659 | glyphs->next (&iter) && iter < end;) |
1660 | if (classValue[iter - start]) return true; |
1661 | return false; |
1662 | } |
1663 | bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const |
1664 | { |
1665 | unsigned int count = classValue.len; |
1666 | if (klass == 0) |
1667 | { |
1668 | /* Match if there's any glyph that is not listed! */ |
1669 | hb_codepoint_t g = HB_SET_VALUE_INVALID; |
1670 | if (!glyphs->next (&g)) return false; |
1671 | if (g < startGlyph) return true; |
1672 | g = startGlyph + count - 1; |
1673 | if (glyphs->next (&g)) return true; |
1674 | /* Fall through. */ |
1675 | } |
1676 | /* TODO Speed up, using set overlap first? */ |
1677 | /* TODO(iter) Rewrite as dagger. */ |
1678 | const HBUINT16 *arr = classValue.arrayZ; |
1679 | for (unsigned int i = 0; i < count; i++) |
1680 | if (arr[i] == klass && glyphs->has (startGlyph + i)) |
1681 | return true; |
1682 | return false; |
1683 | } |
1684 | |
1685 | void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const |
1686 | { |
1687 | unsigned count = classValue.len; |
1688 | if (klass == 0) |
1689 | { |
1690 | unsigned start_glyph = startGlyph; |
1691 | for (uint32_t g = HB_SET_VALUE_INVALID; |
1692 | glyphs->next (&g) && g < start_glyph;) |
1693 | intersect_glyphs->add (g); |
1694 | |
1695 | for (uint32_t g = startGlyph + count - 1; |
1696 | glyphs-> next (&g);) |
1697 | intersect_glyphs->add (g); |
1698 | |
1699 | return; |
1700 | } |
1701 | |
1702 | for (unsigned i = 0; i < count; i++) |
1703 | if (classValue[i] == klass && glyphs->has (startGlyph + i)) |
1704 | intersect_glyphs->add (startGlyph + i); |
1705 | |
1706 | #if 0 |
1707 | /* The following implementation is faster asymptotically, but slower |
1708 | * in practice. */ |
1709 | unsigned start_glyph = startGlyph; |
1710 | unsigned end_glyph = start_glyph + count; |
1711 | for (unsigned g = startGlyph - 1; |
1712 | glyphs->next (&g) && g < end_glyph;) |
1713 | if (classValue.arrayZ[g - start_glyph] == klass) |
1714 | intersect_glyphs->add (g); |
1715 | #endif |
1716 | } |
1717 | |
1718 | void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const |
1719 | { |
1720 | if (glyphs->is_empty ()) return; |
1721 | hb_codepoint_t end_glyph = startGlyph + classValue.len - 1; |
1722 | if (glyphs->get_min () < startGlyph || |
1723 | glyphs->get_max () > end_glyph) |
1724 | intersect_classes->add (0); |
1725 | |
1726 | for (const auto& _ : + hb_enumerate (classValue)) |
1727 | { |
1728 | hb_codepoint_t g = startGlyph + _.first; |
1729 | if (glyphs->has (g)) |
1730 | intersect_classes->add (_.second); |
1731 | } |
1732 | } |
1733 | |
  protected:
  HBUINT16	classFormat;	/* Format identifier--format = 1 */
  typename Types::HBGlyphID
		startGlyph;	/* First GlyphID of the classValueArray */
  typename Types::template ArrayOf<HBUINT16>
		classValue;	/* Array of Class Values--one per GlyphID,
				 * for glyphs startGlyph..startGlyph+len-1 */
  public:
  /* Fixed header: classFormat (2 bytes) + startGlyph and the array length
   * field, each Types::size bytes wide (2 for SmallTypes, 3 for MediumTypes). */
  DEFINE_SIZE_ARRAY (2 + 2 * Types::size, classValue);
};
1743 | |
/* Class Definition Table, format 2 (and its beyond-64k variant, format 4):
 * classes are stored as a sorted array of (first, last, value) glyph ranges.
 * Glyphs not covered by any range implicitly have class 0. */
template <typename Types>
struct ClassDefFormat2_4
{
  friend struct ClassDef;

  private:

  /* Binary-search the sorted range array; glyphs outside every range hit
   * the bsearch null record, whose value field reads as 0. */
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    return rangeRecord.bsearch (glyph_id).value;
  }

  /* Total number of glyphs covered by all ranges, saturated to UINT_MAX
   * (accumulated in a wide integer to avoid overflow with MediumTypes). */
  unsigned get_population () const
  {
    typename Types::large_int ret = 0;
    for (const auto &r : rangeRecord)
      ret += r.get_population ();
    return ret > UINT_MAX ? UINT_MAX : (unsigned) ret;
  }

  /* Serialize a sorted (glyph, class) iterator as ranges, coalescing runs of
   * consecutive glyph IDs that share the same class into a single record. */
  template<typename Iterator,
	   hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c,
		  Iterator it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    if (unlikely (!it))
    {
      /* Empty input: emit an empty format-2 table. */
      classFormat = 2;
      rangeRecord.len = 0;
      return_trace (true);
    }

    unsigned unsorted = false;
    unsigned num_ranges = 1;
    hb_codepoint_t prev_gid = (*it).first;
    unsigned prev_klass = (*it).second;

    /* Open the first range with the first pair. */
    RangeRecord<Types> range_rec;
    range_rec.first = prev_gid;
    range_rec.last = prev_gid;
    range_rec.value = prev_klass;

    auto *record = c->copy (range_rec);
    if (unlikely (!record)) return_trace (false);

    for (const auto gid_klass_pair : + (++it))
    {
      hb_codepoint_t cur_gid = gid_klass_pair.first;
      unsigned cur_klass = gid_klass_pair.second;

      if (cur_gid != prev_gid + 1 ||
	  cur_klass != prev_klass)
      {
	/* Run broken: close the current record and start a new one.
	 * Remember if the input turned out not to be sorted so the
	 * result can be re-sorted below. */
	if (unlikely (cur_gid < prev_gid))
	  unsorted = true;

	if (unlikely (!record)) break;
	record->last = prev_gid;
	num_ranges++;

	range_rec.first = cur_gid;
	range_rec.last = cur_gid;
	range_rec.value = cur_klass;

	record = c->copy (range_rec);
      }

      prev_klass = cur_klass;
      prev_gid = cur_gid;
    }

    if (unlikely (c->in_error ())) return_trace (false);

    /* Close the final open range. */
    if (likely (record)) record->last = prev_gid;
    rangeRecord.len = num_ranges;

    if (unlikely (unsorted))
      rangeRecord.as_array ().qsort (RangeRecord<Types>::cmp_range);

    return_trace (true);
  }

  /* Subset this ClassDef to the glyphs retained by the subset plan,
   * remapping glyph IDs and (via ClassDef_remap_and_serialize) class values.
   * Returns false to signal the (empty) table should be dropped. */
  bool subset (hb_subset_context_t *c,
	       hb_map_t *klass_map = nullptr /*OUT*/,
	       bool keep_empty_table = true,
	       bool use_class_zero = true,
	       const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    const hb_map_t &glyph_map = c->plan->glyph_map_gsub;
    const hb_set_t &glyph_set = *c->plan->glyphset_gsub ();

    hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass;
    hb_set_t orig_klasses;

    /* Heuristic: iterate whichever collection is cheaper — the retained
     * glyph set (each lookup costs a bsearch, hence the bit_storage factor)
     * or the ranges themselves. */
    if (glyph_set.get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2
	< get_population ())
    {
      for (hb_codepoint_t g : glyph_set)
      {
	unsigned klass = get_class (g);
	if (!klass) continue;
	hb_codepoint_t new_gid = glyph_map[g];
	if (new_gid == HB_MAP_VALUE_INVALID) continue;
	if (glyph_filter && !glyph_filter->has (g)) continue;
	glyph_and_klass.push (hb_pair (new_gid, klass));
	orig_klasses.add (klass);
      }
    }
    else
    {
      unsigned num_source_glyphs = c->plan->source->get_num_glyphs ();
      for (auto &range : rangeRecord)
      {
	unsigned klass = range.value;
	if (!klass) continue;
	hb_codepoint_t start = range.first;
	/* Clamp to the source glyph count to guard against ranges that
	 * extend past the font's actual glyphs. */
	hb_codepoint_t end = hb_min (range.last + 1, num_source_glyphs);
	for (hb_codepoint_t g = start; g < end; g++)
	{
	  hb_codepoint_t new_gid = glyph_map[g];
	  if (new_gid == HB_MAP_VALUE_INVALID) continue;
	  if (glyph_filter && !glyph_filter->has (g)) continue;

	  glyph_and_klass.push (hb_pair (new_gid, klass));
	  orig_klasses.add (klass);
	}
      }
    }

    const hb_set_t& glyphset = *c->plan->glyphset_gsub ();
    unsigned glyph_count = glyph_filter
			   ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
			   : glyph_map.get_population ();
    /* Only reuse class 0 when every retained glyph actually has a class. */
    use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length;
    if (!ClassDef_remap_and_serialize (c->serializer,
				       orig_klasses,
				       use_class_zero,
				       glyph_and_klass,
				       klass_map))
      return_trace (false);
    return_trace (keep_empty_table || (bool) glyph_and_klass);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rangeRecord.sanitize (c));
  }

  /* Relative lookup cost: one bsearch over rangeRecord. */
  unsigned cost () const { return hb_bit_storage ((unsigned) rangeRecord.len); /* bsearch cost */ }

  /* Add all glyphs with a non-zero class to `glyphs`.
   * May return false if the array looks unsorted (fast corrupt-data reject). */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    for (auto &range : rangeRecord)
      if (range.value)
	if (unlikely (!range.collect_coverage (glyphs)))
	  return false;
    return true;
  }

  /* Add all glyphs belonging to `klass` to `glyphs`. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    for (auto &range : rangeRecord)
    {
      if (range.value == klass)
	if (unlikely (!range.collect_coverage (glyphs)))
	  return false;
    }
    return true;
  }

  /* True if any glyph in `glyphs` has a non-zero class. */
  bool intersects (const hb_set_t *glyphs) const
  {
    /* Heuristic: probe the set when it is small relative to the ranges. */
    if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
    {
      for (auto g : *glyphs)
	if (get_class (g))
	  return true;
      return false;
    }

    return hb_any (+ hb_iter (rangeRecord)
		   | hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs) && range.value; }));
  }
  /* True if any glyph in `glyphs` has class `klass`.  Class 0 means
   * "any glyph not covered by a range", handled by the gap scan below. */
  bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
  {
    if (klass == 0)
    {
      /* Match if there's any glyph that is not listed! */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      hb_codepoint_t last = HB_SET_VALUE_INVALID;
      auto it = hb_iter (rangeRecord);
      for (auto &range : it)
      {
	/* `it` points at the next range; a range that starts right after
	 * the previous one's end is contiguous — fold it in and keep
	 * scanning for a gap. */
	if (it->first == last + 1)
	{
	  it++;
	  continue;
	}

	if (!glyphs->next (&g))
	  break;
	if (g < range.first)
	  return true;
	g = range.last;
	last = g;
      }
      /* Any glyph beyond the final range is also class 0. */
      if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
	return true;
      /* Fall through. */
    }
    for (const auto &range : rangeRecord)
      if (range.value == klass && range.intersects (*glyphs))
	return true;
    return false;
  }

  /* Add to intersect_glyphs every glyph of `glyphs` whose class is `klass`. */
  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    if (klass == 0)
    {
      /* Class 0 = glyphs in the gaps between (and beyond) the ranges. */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      for (auto &range : rangeRecord)
      {
	if (!glyphs->next (&g))
	  goto done;
	while (g < range.first)
	{
	  intersect_glyphs->add (g);
	  if (!glyphs->next (&g))
	    goto done;
	}
	g = range.last;
      }
      while (glyphs->next (&g))
	intersect_glyphs->add (g);
      done:

      return;
    }

    unsigned count = rangeRecord.len;
    /* Heuristic: for small sets, bsearch each glyph instead of walking
     * every range. */
    if (count > glyphs->get_population () * hb_bit_storage (count) * 8)
    {
      for (auto g : *glyphs)
      {
	unsigned i;
	if (rangeRecord.as_array ().bfind (g, &i) &&
	    rangeRecord.arrayZ[i].value == klass)
	  intersect_glyphs->add (g);
      }
      return;
    }

    for (auto &range : rangeRecord)
    {
      if (range.value != klass) continue;

      unsigned end = range.last + 1;
      /* Iterate set members inside [range.first, range.last]. */
      for (hb_codepoint_t g = range.first - 1;
	   glyphs->next (&g) && g < end;)
	intersect_glyphs->add (g);
    }
  }

  /* Add to intersect_classes every class represented in `glyphs`;
   * class 0 is added when some glyph falls in a gap or outside all ranges. */
  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
  {
    if (glyphs->is_empty ()) return;

    hb_codepoint_t g = HB_SET_VALUE_INVALID;
    for (auto &range : rangeRecord)
    {
      if (!glyphs->next (&g))
	break;
      if (g < range.first)
      {
	intersect_classes->add (0);
	break;
      }
      g = range.last;
    }
    if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
      intersect_classes->add (0);

    for (const auto& range : rangeRecord)
      if (range.intersects (*glyphs))
	intersect_classes->add (range.value);
  }

  protected:
  HBUINT16	classFormat;	/* Format identifier--format = 2 */
  typename Types::template SortedArrayOf<RangeRecord<Types>>
		rangeRecord;	/* Array of glyph ranges--ordered by
				 * Start GlyphID */
  public:
  DEFINE_SIZE_ARRAY (2 + Types::size, rangeRecord);
};
2047 | |
2048 | struct ClassDef |
2049 | { |
2050 | /* Has interface. */ |
2051 | unsigned operator [] (hb_codepoint_t k) const { return get (k); } |
2052 | bool has (hb_codepoint_t k) const { return (*this)[k]; } |
2053 | /* Projection. */ |
2054 | hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); } |
2055 | |
2056 | unsigned int get (hb_codepoint_t k) const { return get_class (k); } |
2057 | unsigned int get_class (hb_codepoint_t glyph_id) const |
2058 | { |
2059 | switch (u.format) { |
2060 | case 1: return u.format1.get_class (glyph_id); |
2061 | case 2: return u.format2.get_class (glyph_id); |
2062 | #ifndef HB_NO_BEYOND_64K |
2063 | case 3: return u.format3.get_class (glyph_id); |
2064 | case 4: return u.format4.get_class (glyph_id); |
2065 | #endif |
2066 | default:return 0; |
2067 | } |
2068 | } |
2069 | |
2070 | unsigned get_population () const |
2071 | { |
2072 | switch (u.format) { |
2073 | case 1: return u.format1.get_population (); |
2074 | case 2: return u.format2.get_population (); |
2075 | #ifndef HB_NO_BEYOND_64K |
2076 | case 3: return u.format3.get_population (); |
2077 | case 4: return u.format4.get_population (); |
2078 | #endif |
2079 | default:return NOT_COVERED; |
2080 | } |
2081 | } |
2082 | |
2083 | template<typename Iterator, |
2084 | hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))> |
2085 | bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero) |
2086 | { |
2087 | TRACE_SERIALIZE (this); |
2088 | if (unlikely (!c->extend_min (this))) return_trace (false); |
2089 | |
2090 | auto it = + it_with_class_zero | hb_filter (hb_second); |
2091 | |
2092 | unsigned format = 2; |
2093 | hb_codepoint_t glyph_max = 0; |
2094 | if (likely (it)) |
2095 | { |
2096 | hb_codepoint_t glyph_min = (*it).first; |
2097 | glyph_max = glyph_min; |
2098 | |
2099 | unsigned num_glyphs = 0; |
2100 | unsigned num_ranges = 1; |
2101 | hb_codepoint_t prev_gid = glyph_min; |
2102 | unsigned prev_klass = (*it).second; |
2103 | |
2104 | for (const auto gid_klass_pair : it) |
2105 | { |
2106 | hb_codepoint_t cur_gid = gid_klass_pair.first; |
2107 | unsigned cur_klass = gid_klass_pair.second; |
2108 | num_glyphs++; |
2109 | if (cur_gid == glyph_min) continue; |
2110 | if (cur_gid > glyph_max) glyph_max = cur_gid; |
2111 | if (cur_gid != prev_gid + 1 || |
2112 | cur_klass != prev_klass) |
2113 | num_ranges++; |
2114 | |
2115 | prev_gid = cur_gid; |
2116 | prev_klass = cur_klass; |
2117 | } |
2118 | |
2119 | if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3) |
2120 | format = 1; |
2121 | } |
2122 | |
2123 | #ifndef HB_NO_BEYOND_64K |
2124 | if (glyph_max > 0xFFFFu) |
2125 | u.format += 2; |
2126 | if (unlikely (glyph_max > 0xFFFFFFu)) |
2127 | #else |
2128 | if (unlikely (glyph_max > 0xFFFFu)) |
2129 | #endif |
2130 | { |
2131 | c->check_success (false, HB_SERIALIZE_ERROR_INT_OVERFLOW); |
2132 | return_trace (false); |
2133 | } |
2134 | |
2135 | u.format = format; |
2136 | |
2137 | switch (u.format) |
2138 | { |
2139 | case 1: return_trace (u.format1.serialize (c, it)); |
2140 | case 2: return_trace (u.format2.serialize (c, it)); |
2141 | #ifndef HB_NO_BEYOND_64K |
2142 | case 3: return_trace (u.format3.serialize (c, it)); |
2143 | case 4: return_trace (u.format4.serialize (c, it)); |
2144 | #endif |
2145 | default:return_trace (false); |
2146 | } |
2147 | } |
2148 | |
2149 | bool subset (hb_subset_context_t *c, |
2150 | hb_map_t *klass_map = nullptr /*OUT*/, |
2151 | bool keep_empty_table = true, |
2152 | bool use_class_zero = true, |
2153 | const Coverage* glyph_filter = nullptr) const |
2154 | { |
2155 | TRACE_SUBSET (this); |
2156 | switch (u.format) { |
2157 | case 1: return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter)); |
2158 | case 2: return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter)); |
2159 | #ifndef HB_NO_BEYOND_64K |
2160 | case 3: return_trace (u.format3.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter)); |
2161 | case 4: return_trace (u.format4.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter)); |
2162 | #endif |
2163 | default:return_trace (false); |
2164 | } |
2165 | } |
2166 | |
2167 | bool sanitize (hb_sanitize_context_t *c) const |
2168 | { |
2169 | TRACE_SANITIZE (this); |
2170 | if (!u.format.sanitize (c)) return_trace (false); |
2171 | switch (u.format) { |
2172 | case 1: return_trace (u.format1.sanitize (c)); |
2173 | case 2: return_trace (u.format2.sanitize (c)); |
2174 | #ifndef HB_NO_BEYOND_64K |
2175 | case 3: return_trace (u.format3.sanitize (c)); |
2176 | case 4: return_trace (u.format4.sanitize (c)); |
2177 | #endif |
2178 | default:return_trace (true); |
2179 | } |
2180 | } |
2181 | |
2182 | unsigned cost () const |
2183 | { |
2184 | switch (u.format) { |
2185 | case 1: return u.format1.cost (); |
2186 | case 2: return u.format2.cost (); |
2187 | #ifndef HB_NO_BEYOND_64K |
2188 | case 3: return u.format3.cost (); |
2189 | case 4: return u.format4.cost (); |
2190 | #endif |
2191 | default:return 0u; |
2192 | } |
2193 | } |
2194 | |
2195 | /* Might return false if array looks unsorted. |
2196 | * Used for faster rejection of corrupt data. */ |
2197 | template <typename set_t> |
2198 | bool collect_coverage (set_t *glyphs) const |
2199 | { |
2200 | switch (u.format) { |
2201 | case 1: return u.format1.collect_coverage (glyphs); |
2202 | case 2: return u.format2.collect_coverage (glyphs); |
2203 | #ifndef HB_NO_BEYOND_64K |
2204 | case 3: return u.format3.collect_coverage (glyphs); |
2205 | case 4: return u.format4.collect_coverage (glyphs); |
2206 | #endif |
2207 | default:return false; |
2208 | } |
2209 | } |
2210 | |
2211 | /* Might return false if array looks unsorted. |
2212 | * Used for faster rejection of corrupt data. */ |
2213 | template <typename set_t> |
2214 | bool collect_class (set_t *glyphs, unsigned int klass) const |
2215 | { |
2216 | switch (u.format) { |
2217 | case 1: return u.format1.collect_class (glyphs, klass); |
2218 | case 2: return u.format2.collect_class (glyphs, klass); |
2219 | #ifndef HB_NO_BEYOND_64K |
2220 | case 3: return u.format3.collect_class (glyphs, klass); |
2221 | case 4: return u.format4.collect_class (glyphs, klass); |
2222 | #endif |
2223 | default:return false; |
2224 | } |
2225 | } |
2226 | |
2227 | bool intersects (const hb_set_t *glyphs) const |
2228 | { |
2229 | switch (u.format) { |
2230 | case 1: return u.format1.intersects (glyphs); |
2231 | case 2: return u.format2.intersects (glyphs); |
2232 | #ifndef HB_NO_BEYOND_64K |
2233 | case 3: return u.format3.intersects (glyphs); |
2234 | case 4: return u.format4.intersects (glyphs); |
2235 | #endif |
2236 | default:return false; |
2237 | } |
2238 | } |
2239 | bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const |
2240 | { |
2241 | switch (u.format) { |
2242 | case 1: return u.format1.intersects_class (glyphs, klass); |
2243 | case 2: return u.format2.intersects_class (glyphs, klass); |
2244 | #ifndef HB_NO_BEYOND_64K |
2245 | case 3: return u.format3.intersects_class (glyphs, klass); |
2246 | case 4: return u.format4.intersects_class (glyphs, klass); |
2247 | #endif |
2248 | default:return false; |
2249 | } |
2250 | } |
2251 | |
2252 | void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const |
2253 | { |
2254 | switch (u.format) { |
2255 | case 1: return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs); |
2256 | case 2: return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs); |
2257 | #ifndef HB_NO_BEYOND_64K |
2258 | case 3: return u.format3.intersected_class_glyphs (glyphs, klass, intersect_glyphs); |
2259 | case 4: return u.format4.intersected_class_glyphs (glyphs, klass, intersect_glyphs); |
2260 | #endif |
2261 | default:return; |
2262 | } |
2263 | } |
2264 | |
2265 | void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const |
2266 | { |
2267 | switch (u.format) { |
2268 | case 1: return u.format1.intersected_classes (glyphs, intersect_classes); |
2269 | case 2: return u.format2.intersected_classes (glyphs, intersect_classes); |
2270 | #ifndef HB_NO_BEYOND_64K |
2271 | case 3: return u.format3.intersected_classes (glyphs, intersect_classes); |
2272 | case 4: return u.format4.intersected_classes (glyphs, intersect_classes); |
2273 | #endif |
2274 | default:return; |
2275 | } |
2276 | } |
2277 | |
2278 | |
2279 | protected: |
2280 | union { |
2281 | HBUINT16 format; /* Format identifier */ |
2282 | ClassDefFormat1_3<SmallTypes> format1; |
2283 | ClassDefFormat2_4<SmallTypes> format2; |
2284 | #ifndef HB_NO_BEYOND_64K |
2285 | ClassDefFormat1_3<MediumTypes>format3; |
2286 | ClassDefFormat2_4<MediumTypes>format4; |
2287 | #endif |
2288 | } u; |
2289 | public: |
2290 | DEFINE_SIZE_UNION (2, format); |
2291 | }; |
2292 | |
2293 | template<typename Iterator> |
2294 | static inline bool ClassDef_serialize (hb_serialize_context_t *c, |
2295 | Iterator it) |
2296 | { return (c->start_embed<ClassDef> ()->serialize (c, it)); } |
2297 | |
2298 | |
2299 | /* |
2300 | * Item Variation Store |
2301 | */ |
2302 | |
2303 | struct VarRegionAxis |
2304 | { |
2305 | float evaluate (int coord) const |
2306 | { |
2307 | int peak = peakCoord.to_int (); |
2308 | if (peak == 0 || coord == peak) |
2309 | return 1.f; |
2310 | |
2311 | int start = startCoord.to_int (), end = endCoord.to_int (); |
2312 | |
2313 | /* TODO Move these to sanitize(). */ |
2314 | if (unlikely (start > peak || peak > end)) |
2315 | return 1.f; |
2316 | if (unlikely (start < 0 && end > 0 && peak != 0)) |
2317 | return 1.f; |
2318 | |
2319 | if (coord <= start || end <= coord) |
2320 | return 0.f; |
2321 | |
2322 | /* Interpolate */ |
2323 | if (coord < peak) |
2324 | return float (coord - start) / (peak - start); |
2325 | else |
2326 | return float (end - coord) / (end - peak); |
2327 | } |
2328 | |
2329 | bool sanitize (hb_sanitize_context_t *c) const |
2330 | { |
2331 | TRACE_SANITIZE (this); |
2332 | return_trace (c->check_struct (this)); |
2333 | /* TODO Handle invalid start/peak/end configs, so we don't |
2334 | * have to do that at runtime. */ |
2335 | } |
2336 | |
2337 | public: |
2338 | F2DOT14 startCoord; |
2339 | F2DOT14 peakCoord; |
2340 | F2DOT14 endCoord; |
2341 | public: |
2342 | DEFINE_SIZE_STATIC (6); |
2343 | }; |
2344 | |
2345 | #define REGION_CACHE_ITEM_CACHE_INVALID 2.f |
2346 | |
/* Variation Region List: a flat regionCount x axisCount matrix of
 * VarRegionAxis tents.  Region scalars may be memoized in an external
 * per-region float cache (see VariationStore::create_cache). */
struct VarRegionList
{
  using cache_t = float;

  /* Scalar of region `region_index` at the given normalized coords:
   * the product of all its axis tents (0 if any axis contributes 0).
   * If `cache` is given, the result is memoized per region; the sentinel
   * REGION_CACHE_ITEM_CACHE_INVALID marks a not-yet-computed slot. */
  float evaluate (unsigned int region_index,
		  const int *coords, unsigned int coord_len,
		  cache_t *cache = nullptr) const
  {
    if (unlikely (region_index >= regionCount))
      return 0.;

    float *cached_value = nullptr;
    if (cache)
    {
      cached_value = &(cache[region_index]);
      if (likely (*cached_value != REGION_CACHE_ITEM_CACHE_INVALID))
	return *cached_value;
    }

    /* Row `region_index` of the axes matrix. */
    const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);

    float v = 1.;
    unsigned int count = axisCount;
    for (unsigned int i = 0; i < count; i++)
    {
      /* Missing coords (beyond coord_len) are treated as 0 (default). */
      int coord = i < coord_len ? coords[i] : 0;
      float factor = axes[i].evaluate (coord);
      if (factor == 0.f)
      {
	/* Early out: product is 0 regardless of remaining axes. */
	if (cache)
	  *cached_value = 0.;
	return 0.;
      }
      v *= factor;
    }

    if (cache)
      *cached_value = v;
    return v;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
  }

  /* Serialize a copy of `src` keeping only (and reordering to) the regions
   * in region_map; rows are copied whole via memcpy. */
  bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_inc_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    axisCount = src->axisCount;
    regionCount = region_map.get_population ();
    /* Guard the axisCount * regionCount * static_size multiplication. */
    if (unlikely (hb_unsigned_mul_overflows (axisCount * regionCount,
					     VarRegionAxis::static_size))) return_trace (false);
    if (unlikely (!c->extend (this))) return_trace (false);
    unsigned int region_count = src->regionCount;
    for (unsigned int r = 0; r < regionCount; r++)
    {
      /* New region index r maps back to old index `backward` in src. */
      unsigned int backward = region_map.backward (r);
      if (backward >= region_count) return_trace (false);
      hb_memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
    }

    return_trace (true);
  }

  unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }

  public:
  HBUINT16	axisCount;
  HBUINT15	regionCount;
  protected:
  UnsizedArrayOf<VarRegionAxis>
		axesZ;
  public:
  DEFINE_SIZE_ARRAY (4, axesZ);
};
2425 | |
2426 | struct VarData |
2427 | { |
2428 | unsigned int get_item_count () const |
2429 | { return itemCount; } |
2430 | |
2431 | unsigned int get_region_index_count () const |
2432 | { return regionIndices.len; } |
2433 | |
2434 | unsigned int get_row_size () const |
2435 | { return (wordCount () + regionIndices.len) * (longWords () ? 2 : 1); } |
2436 | |
2437 | unsigned int get_size () const |
2438 | { return min_size |
2439 | - regionIndices.min_size + regionIndices.get_size () |
2440 | + itemCount * get_row_size (); |
2441 | } |
2442 | |
2443 | float get_delta (unsigned int inner, |
2444 | const int *coords, unsigned int coord_count, |
2445 | const VarRegionList ®ions, |
2446 | VarRegionList::cache_t *cache = nullptr) const |
2447 | { |
2448 | if (unlikely (inner >= itemCount)) |
2449 | return 0.; |
2450 | |
2451 | unsigned int count = regionIndices.len; |
2452 | bool is_long = longWords (); |
2453 | unsigned word_count = wordCount (); |
2454 | unsigned int scount = is_long ? count : word_count; |
2455 | unsigned int lcount = is_long ? word_count : 0; |
2456 | |
2457 | const HBUINT8 *bytes = get_delta_bytes (); |
2458 | const HBUINT8 *row = bytes + inner * get_row_size (); |
2459 | |
2460 | float delta = 0.; |
2461 | unsigned int i = 0; |
2462 | |
2463 | const HBINT32 *lcursor = reinterpret_cast<const HBINT32 *> (row); |
2464 | for (; i < lcount; i++) |
2465 | { |
2466 | float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache); |
2467 | delta += scalar * *lcursor++; |
2468 | } |
2469 | const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (lcursor); |
2470 | for (; i < scount; i++) |
2471 | { |
2472 | float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache); |
2473 | delta += scalar * *scursor++; |
2474 | } |
2475 | const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor); |
2476 | for (; i < count; i++) |
2477 | { |
2478 | float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache); |
2479 | delta += scalar * *bcursor++; |
2480 | } |
2481 | |
2482 | return delta; |
2483 | } |
2484 | |
2485 | void get_region_scalars (const int *coords, unsigned int coord_count, |
2486 | const VarRegionList ®ions, |
2487 | float *scalars /*OUT */, |
2488 | unsigned int num_scalars) const |
2489 | { |
2490 | unsigned count = hb_min (num_scalars, regionIndices.len); |
2491 | for (unsigned int i = 0; i < count; i++) |
2492 | scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count); |
2493 | for (unsigned int i = count; i < num_scalars; i++) |
2494 | scalars[i] = 0.f; |
2495 | } |
2496 | |
2497 | bool sanitize (hb_sanitize_context_t *c) const |
2498 | { |
2499 | TRACE_SANITIZE (this); |
2500 | return_trace (c->check_struct (this) && |
2501 | regionIndices.sanitize (c) && |
2502 | wordCount () <= regionIndices.len && |
2503 | c->check_range (get_delta_bytes (), |
2504 | itemCount, |
2505 | get_row_size ())); |
2506 | } |
2507 | |
2508 | bool serialize (hb_serialize_context_t *c, |
2509 | const VarData *src, |
2510 | const hb_inc_bimap_t &inner_map, |
2511 | const hb_inc_bimap_t ®ion_map) |
2512 | { |
2513 | TRACE_SERIALIZE (this); |
2514 | if (unlikely (!c->extend_min (this))) return_trace (false); |
2515 | itemCount = inner_map.get_next_value (); |
2516 | |
2517 | /* Optimize word count */ |
2518 | unsigned ri_count = src->regionIndices.len; |
2519 | enum delta_size_t { kZero=0, kNonWord, kWord }; |
2520 | hb_vector_t<delta_size_t> delta_sz; |
2521 | hb_vector_t<unsigned int> ri_map; /* maps new index to old index */ |
2522 | delta_sz.resize (ri_count); |
2523 | ri_map.resize (ri_count); |
2524 | unsigned int new_word_count = 0; |
2525 | unsigned int r; |
2526 | |
2527 | const HBUINT8 *src_delta_bytes = src->get_delta_bytes (); |
2528 | unsigned src_row_size = src->get_row_size (); |
2529 | unsigned src_word_count = src->wordCount (); |
2530 | bool src_long_words = src->longWords (); |
2531 | |
2532 | bool has_long = false; |
2533 | if (src_long_words) |
2534 | { |
2535 | for (r = 0; r < src_word_count; r++) |
2536 | { |
2537 | for (unsigned old_gid : inner_map.keys()) |
2538 | { |
2539 | int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size); |
2540 | if (delta < -65536 || 65535 < delta) |
2541 | { |
2542 | has_long = true; |
2543 | break; |
2544 | } |
2545 | } |
2546 | } |
2547 | } |
2548 | |
2549 | signed min_threshold = has_long ? -65536 : -128; |
2550 | signed max_threshold = has_long ? +65535 : +127; |
2551 | for (r = 0; r < ri_count; r++) |
2552 | { |
2553 | bool short_circuit = src_long_words == has_long && src_word_count <= r; |
2554 | |
2555 | delta_sz[r] = kZero; |
2556 | for (unsigned old_gid : inner_map.keys()) |
2557 | { |
2558 | int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size); |
2559 | if (delta < min_threshold || max_threshold < delta) |
2560 | { |
2561 | delta_sz[r] = kWord; |
2562 | new_word_count++; |
2563 | break; |
2564 | } |
2565 | else if (delta != 0) |
2566 | { |
2567 | delta_sz[r] = kNonWord; |
2568 | if (short_circuit) |
2569 | break; |
2570 | } |
2571 | } |
2572 | } |
2573 | |
2574 | unsigned int word_index = 0; |
2575 | unsigned int non_word_index = new_word_count; |
2576 | unsigned int new_ri_count = 0; |
2577 | for (r = 0; r < ri_count; r++) |
2578 | if (delta_sz[r]) |
2579 | { |
2580 | unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++; |
2581 | ri_map[new_r] = r; |
2582 | new_ri_count++; |
2583 | } |
2584 | |
2585 | wordSizeCount = new_word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0); |
2586 | |
2587 | regionIndices.len = new_ri_count; |
2588 | |
2589 | if (unlikely (!c->extend (this))) return_trace (false); |
2590 | |
2591 | for (r = 0; r < new_ri_count; r++) |
2592 | regionIndices[r] = region_map[src->regionIndices[ri_map[r]]]; |
2593 | |
2594 | HBUINT8 *delta_bytes = get_delta_bytes (); |
2595 | unsigned row_size = get_row_size (); |
2596 | unsigned count = itemCount; |
2597 | for (unsigned int i = 0; i < count; i++) |
2598 | { |
2599 | unsigned int old = inner_map.backward (i); |
2600 | for (unsigned int r = 0; r < new_ri_count; r++) |
2601 | set_item_delta_fast (i, r, |
2602 | src->get_item_delta_fast (old, ri_map[r], |
2603 | src_delta_bytes, src_row_size), |
2604 | delta_bytes, row_size); |
2605 | } |
2606 | |
2607 | return_trace (true); |
2608 | } |
2609 | |
2610 | void collect_region_refs (hb_set_t ®ion_indices, const hb_inc_bimap_t &inner_map) const |
2611 | { |
2612 | const HBUINT8 *delta_bytes = get_delta_bytes (); |
2613 | unsigned row_size = get_row_size (); |
2614 | |
2615 | for (unsigned int r = 0; r < regionIndices.len; r++) |
2616 | { |
2617 | unsigned int region = regionIndices.arrayZ[r]; |
2618 | if (region_indices.has (region)) continue; |
2619 | for (hb_codepoint_t old_gid : inner_map.keys()) |
2620 | if (get_item_delta_fast (old_gid, r, delta_bytes, row_size) != 0) |
2621 | { |
2622 | region_indices.add (region); |
2623 | break; |
2624 | } |
2625 | } |
2626 | } |
2627 | |
2628 | protected: |
2629 | const HBUINT8 *get_delta_bytes () const |
2630 | { return &StructAfter<HBUINT8> (regionIndices); } |
2631 | |
2632 | HBUINT8 *get_delta_bytes () |
2633 | { return &StructAfter<HBUINT8> (regionIndices); } |
2634 | |
2635 | int32_t get_item_delta_fast (unsigned int item, unsigned int region, |
2636 | const HBUINT8 *delta_bytes, unsigned row_size) const |
2637 | { |
2638 | if (unlikely (item >= itemCount || region >= regionIndices.len)) return 0; |
2639 | |
2640 | const HBINT8 *p = (const HBINT8 *) delta_bytes + item * row_size; |
2641 | unsigned word_count = wordCount (); |
2642 | bool is_long = longWords (); |
2643 | if (is_long) |
2644 | { |
2645 | if (region < word_count) |
2646 | return ((const HBINT32 *) p)[region]; |
2647 | else |
2648 | return ((const HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count]; |
2649 | } |
2650 | else |
2651 | { |
2652 | if (region < word_count) |
2653 | return ((const HBINT16 *) p)[region]; |
2654 | else |
2655 | return (p + HBINT16::static_size * word_count)[region - word_count]; |
2656 | } |
2657 | } |
2658 | int32_t get_item_delta (unsigned int item, unsigned int region) const |
2659 | { |
2660 | return get_item_delta_fast (item, region, |
2661 | get_delta_bytes (), |
2662 | get_row_size ()); |
2663 | } |
2664 | |
2665 | void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta, |
2666 | HBUINT8 *delta_bytes, unsigned row_size) |
2667 | { |
2668 | HBINT8 *p = (HBINT8 *) delta_bytes + item * row_size; |
2669 | unsigned word_count = wordCount (); |
2670 | bool is_long = longWords (); |
2671 | if (is_long) |
2672 | { |
2673 | if (region < word_count) |
2674 | ((HBINT32 *) p)[region] = delta; |
2675 | else |
2676 | ((HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count] = delta; |
2677 | } |
2678 | else |
2679 | { |
2680 | if (region < word_count) |
2681 | ((HBINT16 *) p)[region] = delta; |
2682 | else |
2683 | (p + HBINT16::static_size * word_count)[region - word_count] = delta; |
2684 | } |
2685 | } |
2686 | void set_item_delta (unsigned int item, unsigned int region, int32_t delta) |
2687 | { |
2688 | set_item_delta_fast (item, region, delta, |
2689 | get_delta_bytes (), |
2690 | get_row_size ()); |
2691 | } |
2692 | |
  /* High bit of wordSizeCount: when set, "word" deltas are 32-bit (and the
   * narrow tail deltas 16-bit) instead of 16-bit words / 8-bit tail. */
  bool longWords () const { return wordSizeCount & 0x8000u /* LONG_WORDS */; }
  /* Low 15 bits of wordSizeCount: number of word-sized deltas leading each row. */
  unsigned wordCount () const { return wordSizeCount & 0x7FFFu /* WORD_DELTA_COUNT_MASK */; }
2695 | |
  protected:
  HBUINT16		itemCount;	/* Number of delta rows (delta sets) in this table. */
  HBUINT16		wordSizeCount;	/* LONG_WORDS flag (0x8000) | word-delta count (0x7FFF). */
  Array16Of<HBUINT16>	regionIndices;	/* Indices into the VarRegionList, one per column. */
  /*UnsizedArrayOf<HBUINT8>bytesX;*/	/* Delta rows follow; layout depends on wordSizeCount. */
  public:
  DEFINE_SIZE_ARRAY (6, regionIndices);
};
2704 | |
2705 | struct VariationStore |
2706 | { |
2707 | using cache_t = VarRegionList::cache_t; |
2708 | |
2709 | cache_t *create_cache () const |
2710 | { |
2711 | #ifdef HB_NO_VAR |
2712 | return nullptr; |
2713 | #endif |
2714 | auto &r = this+regions; |
2715 | unsigned count = r.regionCount; |
2716 | |
2717 | float *cache = (float *) hb_malloc (sizeof (float) * count); |
2718 | if (unlikely (!cache)) return nullptr; |
2719 | |
2720 | for (unsigned i = 0; i < count; i++) |
2721 | cache[i] = REGION_CACHE_ITEM_CACHE_INVALID; |
2722 | |
2723 | return cache; |
2724 | } |
2725 | |
2726 | static void destroy_cache (cache_t *cache) { hb_free (cache); } |
2727 | |
2728 | private: |
2729 | float get_delta (unsigned int outer, unsigned int inner, |
2730 | const int *coords, unsigned int coord_count, |
2731 | VarRegionList::cache_t *cache = nullptr) const |
2732 | { |
2733 | #ifdef HB_NO_VAR |
2734 | return 0.f; |
2735 | #endif |
2736 | |
2737 | if (unlikely (outer >= dataSets.len)) |
2738 | return 0.f; |
2739 | |
2740 | return (this+dataSets[outer]).get_delta (inner, |
2741 | coords, coord_count, |
2742 | this+regions, |
2743 | cache); |
2744 | } |
2745 | |
2746 | public: |
2747 | float get_delta (unsigned int index, |
2748 | const int *coords, unsigned int coord_count, |
2749 | VarRegionList::cache_t *cache = nullptr) const |
2750 | { |
2751 | unsigned int outer = index >> 16; |
2752 | unsigned int inner = index & 0xFFFF; |
2753 | return get_delta (outer, inner, coords, coord_count, cache); |
2754 | } |
2755 | float get_delta (unsigned int index, |
2756 | hb_array_t<int> coords, |
2757 | VarRegionList::cache_t *cache = nullptr) const |
2758 | { |
2759 | return get_delta (index, |
2760 | coords.arrayZ, coords.length, |
2761 | cache); |
2762 | } |
2763 | |
2764 | bool sanitize (hb_sanitize_context_t *c) const |
2765 | { |
2766 | #ifdef HB_NO_VAR |
2767 | return true; |
2768 | #endif |
2769 | |
2770 | TRACE_SANITIZE (this); |
2771 | return_trace (c->check_struct (this) && |
2772 | format == 1 && |
2773 | regions.sanitize (c, this) && |
2774 | dataSets.sanitize (c, this)); |
2775 | } |
2776 | |
2777 | bool serialize (hb_serialize_context_t *c, |
2778 | const VariationStore *src, |
2779 | const hb_array_t <const hb_inc_bimap_t> &inner_maps) |
2780 | { |
2781 | TRACE_SERIALIZE (this); |
2782 | #ifdef HB_NO_VAR |
2783 | return_trace (false); |
2784 | #endif |
2785 | |
2786 | if (unlikely (!c->extend_min (this))) return_trace (false); |
2787 | |
2788 | unsigned int set_count = 0; |
2789 | for (unsigned int i = 0; i < inner_maps.length; i++) |
2790 | if (inner_maps[i].get_population ()) |
2791 | set_count++; |
2792 | |
2793 | format = 1; |
2794 | |
2795 | const auto &src_regions = src+src->regions; |
2796 | |
2797 | hb_set_t region_indices; |
2798 | for (unsigned int i = 0; i < inner_maps.length; i++) |
2799 | (src+src->dataSets[i]).collect_region_refs (region_indices, inner_maps[i]); |
2800 | |
2801 | if (region_indices.in_error ()) |
2802 | return_trace (false); |
2803 | |
2804 | region_indices.del_range ((src_regions).regionCount, hb_set_t::INVALID); |
2805 | |
2806 | /* TODO use constructor when our data-structures support that. */ |
2807 | hb_inc_bimap_t region_map; |
2808 | + hb_iter (region_indices) |
2809 | | hb_apply ([®ion_map] (unsigned _) { region_map.add(_); }) |
2810 | ; |
2811 | if (region_map.in_error()) |
2812 | return_trace (false); |
2813 | |
2814 | if (unlikely (!regions.serialize_serialize (c, &src_regions, region_map))) |
2815 | return_trace (false); |
2816 | |
2817 | dataSets.len = set_count; |
2818 | if (unlikely (!c->extend (dataSets))) return_trace (false); |
2819 | |
2820 | /* TODO: The following code could be simplified when |
2821 | * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize () */ |
2822 | unsigned int set_index = 0; |
2823 | for (unsigned int i = 0; i < inner_maps.length; i++) |
2824 | { |
2825 | if (!inner_maps[i].get_population ()) continue; |
2826 | if (unlikely (!dataSets[set_index++] |
2827 | .serialize_serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map))) |
2828 | return_trace (false); |
2829 | } |
2830 | |
2831 | return_trace (true); |
2832 | } |
2833 | |
2834 | VariationStore *copy (hb_serialize_context_t *c) const |
2835 | { |
2836 | TRACE_SERIALIZE (this); |
2837 | auto *out = c->start_embed (this); |
2838 | if (unlikely (!out)) return_trace (nullptr); |
2839 | |
2840 | hb_vector_t <hb_inc_bimap_t> inner_maps; |
2841 | unsigned count = dataSets.len; |
2842 | for (unsigned i = 0; i < count; i++) |
2843 | { |
2844 | hb_inc_bimap_t *map = inner_maps.push (); |
2845 | auto &data = this+dataSets[i]; |
2846 | |
2847 | unsigned itemCount = data.get_item_count (); |
2848 | for (unsigned j = 0; j < itemCount; j++) |
2849 | map->add (j); |
2850 | } |
2851 | |
2852 | if (unlikely (!out->serialize (c, this, inner_maps))) return_trace (nullptr); |
2853 | |
2854 | return_trace (out); |
2855 | } |
2856 | |
2857 | bool subset (hb_subset_context_t *c, const hb_array_t<const hb_inc_bimap_t> &inner_maps) const |
2858 | { |
2859 | TRACE_SUBSET (this); |
2860 | #ifdef HB_NO_VAR |
2861 | return_trace (false); |
2862 | #endif |
2863 | |
2864 | VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> (); |
2865 | if (unlikely (!varstore_prime)) return_trace (false); |
2866 | |
2867 | varstore_prime->serialize (c->serializer, this, inner_maps); |
2868 | |
2869 | return_trace ( |
2870 | !c->serializer->in_error() |
2871 | && varstore_prime->dataSets); |
2872 | } |
2873 | |
2874 | unsigned int get_region_index_count (unsigned int major) const |
2875 | { |
2876 | #ifdef HB_NO_VAR |
2877 | return 0; |
2878 | #endif |
2879 | return (this+dataSets[major]).get_region_index_count (); |
2880 | } |
2881 | |
2882 | void get_region_scalars (unsigned int major, |
2883 | const int *coords, unsigned int coord_count, |
2884 | float *scalars /*OUT*/, |
2885 | unsigned int num_scalars) const |
2886 | { |
2887 | #ifdef HB_NO_VAR |
2888 | for (unsigned i = 0; i < num_scalars; i++) |
2889 | scalars[i] = 0.f; |
2890 | return; |
2891 | #endif |
2892 | |
2893 | (this+dataSets[major]).get_region_scalars (coords, coord_count, |
2894 | this+regions, |
2895 | &scalars[0], num_scalars); |
2896 | } |
2897 | |
2898 | unsigned int get_sub_table_count () const |
2899 | { |
2900 | #ifdef HB_NO_VAR |
2901 | return 0; |
2902 | #endif |
2903 | return dataSets.len; |
2904 | } |
2905 | |
2906 | protected: |
2907 | HBUINT16 format; |
2908 | Offset32To<VarRegionList> regions; |
2909 | Array16OfOffset32To<VarData> dataSets; |
2910 | public: |
2911 | DEFINE_SIZE_ARRAY_SIZED (8, dataSets); |
2912 | }; |
2913 | |
2914 | #undef REGION_CACHE_ITEM_CACHE_INVALID |
2915 | |
2916 | /* |
2917 | * Feature Variations |
2918 | */ |
/* Outcome of checking a condition (set) against a subsetting plan's axis
 * limits; see ConditionFormat1::keep_with_variations (). */
enum Cond_with_Var_flag_t
{
  KEEP_COND_WITH_VAR = 0,	/* Condition still varies under the plan: keep it. */
  KEEP_RECORD_WITH_VAR = 1,	/* Condition spans the full axis range: record kept, condition itself redundant. */
  DROP_COND_WITH_VAR = 2,	/* Condition met and axis pinned: drop just this condition. */
  DROP_RECORD_WITH_VAR = 3,	/* Condition can never be met: drop the whole record. */
};
2926 | |
/* ConditionFormat1: a single-axis range test — matches when the axis
 * coordinate lies within [filterRangeMinValue, filterRangeMaxValue]. */
struct ConditionFormat1
{
  friend struct Condition;

  /* Instancing support: remaps axisIndex through the plan's axis map and
   * renormalizes the filter range against the axis's new limits. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    const hb_map_t *index_map = &c->plan->axes_index_map;
    if (index_map->is_empty ()) return_trace (true);

    /* Axis unknown or dropped by the plan: condition can't be kept. */
    const hb_map_t& axes_old_index_tag_map = c->plan->axes_old_index_tag_map;
    hb_codepoint_t *axis_tag;
    if (!axes_old_index_tag_map.has (axisIndex, &axis_tag) ||
        !index_map->has (axisIndex))
      return_trace (false);

    const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location = c->plan->axes_location;
    Triple axis_limit{-1.f, 0.f, 1.f};	/* Default: full normalized range. */
    Triple *normalized_limit;
    if (normalized_axes_location.has (*axis_tag, &normalized_limit))
      axis_limit = *normalized_limit;

    const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances = c->plan->axes_triple_distances;
    TripleDistances axis_triple_distances{1.f, 1.f};
    TripleDistances *triple_dists;
    if (axes_triple_distances.has (*axis_tag, &triple_dists))
      axis_triple_distances = *triple_dists;

    /* Re-express the filter range in the instanced axis's normalized space. */
    float normalized_min = renormalizeValue (filterRangeMinValue.to_float (), axis_limit, axis_triple_distances, false);
    float normalized_max = renormalizeValue (filterRangeMaxValue.to_float (), axis_limit, axis_triple_distances, false);
    out->filterRangeMinValue.set_float (normalized_min);
    out->filterRangeMaxValue.set_float (normalized_max);

    return_trace (c->serializer->check_assign (out->axisIndex, index_map->get (axisIndex),
                                               HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  private:
  /* Classify this condition against the plan's axis limits (see
   * Cond_with_Var_flag_t).  Also clears c->apply when the axis default
   * value falls outside the filter range. */
  Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
                                             hb_map_t *condition_map /* OUT */) const
  {
    //invalid axis index, drop the entire record
    if (!c->axes_index_tag_map->has (axisIndex))
      return DROP_RECORD_WITH_VAR;

    hb_tag_t axis_tag = c->axes_index_tag_map->get (axisIndex);

    Triple axis_range (-1.f, 0.f, 1.f);	/* Default: full normalized range. */
    Triple *axis_limit;
    if (c->axes_location->has (axis_tag, &axis_limit))
      axis_range = *axis_limit;

    float axis_min_val = axis_range.minimum;
    float axis_default_val = axis_range.middle;
    float axis_max_val = axis_range.maximum;

    float filter_min_val = filterRangeMinValue.to_float ();
    float filter_max_val = filterRangeMaxValue.to_float ();

    /* Substitution not active at the default location. */
    if (axis_default_val < filter_min_val ||
        axis_default_val > filter_max_val)
      c->apply = false;

    //condition not met, drop the entire record
    if (axis_min_val > filter_max_val || axis_max_val < filter_min_val ||
        filter_min_val > filter_max_val)
      return DROP_RECORD_WITH_VAR;

    //condition met and axis pinned, drop the condition
    if (c->axes_location->has (axis_tag) &&
        c->axes_location->get (axis_tag).is_point ())
      return DROP_COND_WITH_VAR;

    if (filter_max_val != axis_max_val || filter_min_val != axis_min_val)
    {
      // add axisIndex->value into the hashmap so we can check if the record is
      // unique with variations
      int16_t int_filter_max_val = filterRangeMaxValue.to_int ();
      int16_t int_filter_min_val = filterRangeMinValue.to_int ();
      hb_codepoint_t val = (int_filter_max_val << 16) + int_filter_min_val;

      condition_map->set (axisIndex, val);
      return KEEP_COND_WITH_VAR;
    }

    /* Filter covers the whole (limited) axis range: always true. */
    return KEEP_RECORD_WITH_VAR;
  }

  /* Runtime evaluation.  Coordinates past coord_len default to 0, i.e.
   * the axis default position. */
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
    return filterRangeMinValue.to_int () <= coord && coord <= filterRangeMaxValue.to_int ();
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  HBUINT16	axisIndex;		/* Index of the axis, in fvar order. */
  F2DOT14	filterRangeMinValue;	/* Minimum normalized axis value. */
  F2DOT14	filterRangeMaxValue;	/* Maximum normalized axis value. */
  public:
  DEFINE_SIZE_STATIC (8);
};
3038 | |
/* Condition: format-dispatching wrapper around the concrete condition
 * formats (currently only format 1). */
struct Condition
{
  /* Unknown formats never match. */
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    switch (u.format) {
    case 1: return u.format1.evaluate (coords, coord_len);
    default:return false;
    }
  }

  /* Unknown formats are conservatively kept, but marked not-applying. */
  Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
                                             hb_map_t *condition_map /* OUT */) const
  {
    switch (u.format) {
    case 1: return u.format1.keep_with_variations (c, condition_map);
    default: c->apply = false; return KEEP_COND_WITH_VAR;
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
    TRACE_DISPATCH (this, u.format);
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Format must be readable before we can pick a branch. */
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    default:return_trace (true);
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ConditionFormat1	format1;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
3087 | |
/* ConditionSet: conjunction of conditions — matches only when every
 * member condition matches. */
struct ConditionSet
{
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    unsigned int count = conditions.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
        return false;
    return true;
  }

  /* Decide which conditions survive instancing.  On success, records the
   * set of kept condition indices in c->record_cond_idx_map and the
   * condition-value map in c->conditionset_map (used for de-duplication).
   * Returns early (record dropped) if any condition can never be met or
   * the whole set duplicates one already seen. */
  void keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  {
    hb_map_t *condition_map = hb_map_create ();
    if (unlikely (!condition_map)) return;
    hb::shared_ptr<hb_map_t> p {condition_map};

    hb_set_t *cond_set = hb_set_create ();
    if (unlikely (!cond_set)) return;
    hb::shared_ptr<hb_set_t> s {cond_set};

    c->apply = true;
    bool should_keep = false;
    unsigned num_kept_cond = 0, cond_idx = 0;
    for (const auto& offset : conditions)
    {
      Cond_with_Var_flag_t ret = (this+offset).keep_with_variations (c, condition_map);
      // condition is not met or condition out of range, drop the entire record
      if (ret == DROP_RECORD_WITH_VAR)
        return;

      if (ret == KEEP_COND_WITH_VAR)
      {
        should_keep = true;
        cond_set->add (cond_idx);
        num_kept_cond++;
      }

      if (ret == KEEP_RECORD_WITH_VAR)
        should_keep = true;

      cond_idx++;
    }

    if (!should_keep) return;

    //check if condition_set is unique with variations
    if (c->conditionset_map->has (p))
      //duplicate found, drop the entire record
      return;

    c->conditionset_map->set (p, 1);
    c->record_cond_idx_map->set (c->cur_record_idx, s);
    /* No conditions left at all: the record applies unconditionally.
     * (should_keep is necessarily true here; kept for clarity.) */
    if (should_keep && num_kept_cond == 0)
      c->universal = true;
  }

  /* Serialize only the conditions retained for this record (if the plan
   * recorded a retained-condition set for it). */
  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    hb_set_t *retained_cond_set = nullptr;
    if (l->feature_record_cond_idx_map != nullptr)
      retained_cond_set = l->feature_record_cond_idx_map->get (l->cur_feature_var_record_idx);

    unsigned int count = conditions.len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (retained_cond_set != nullptr && !retained_cond_set->has (i))
        continue;
      subset_offset_array (c, out->conditions, this) (conditions[i]);
    }

    return_trace (bool (out->conditions));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, this));
  }

  protected:
  Array16OfOffset32To<Condition>	conditions;
  public:
  DEFINE_SIZE_ARRAY (2, conditions);
};
3178 | |
/* FeatureTableSubstitutionRecord: replaces the feature table at
 * featureIndex with an alternate Feature when its conditions hold. */
struct FeatureTableSubstitutionRecord
{
  friend struct FeatureTableSubstitution;

  void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+feature).add_lookup_indexes_to (lookup_indexes);
  }

  /* Mark featureIndex as reachable if the substitute feature uses any of
   * the given lookups. */
  void closure_features (const void *base,
                         const hb_map_t *lookup_indexes,
                         hb_set_t *feature_indexes /* OUT */) const
  {
    if ((base+feature).intersects_lookup_indexes (lookup_indexes))
      feature_indexes->add (featureIndex);
  }

  void collect_feature_substitutes_with_variations (hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
                                                    const hb_set_t *feature_indices,
                                                    const void *base) const
  {
    if (feature_indices->has (featureIndex))
      feature_substitutes_map->set (featureIndex, &(base+feature));
  }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    if (!c->feature_index_map->has (featureIndex) ||
        c->feature_substitutes_map->has (featureIndex)) {
      // Feature that is being substituted is not being retained, so we don't
      // need this.
      return_trace (false);
    }

    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Remap the feature index into the subset font's feature list. */
    out->featureIndex = c->feature_index_map->get (featureIndex);
    bool ret = out->feature.serialize_subset (c->subset_context, feature, base, c);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && feature.sanitize (c, base));
  }

  protected:
  HBUINT16		featureIndex;	/* Index of the feature being substituted. */
  Offset32To<Feature>	feature;	/* Alternate feature table to use instead. */
  public:
  DEFINE_SIZE_STATIC (6);
};
3234 | |
/* FeatureTableSubstitution: a versioned list of feature-table
 * substitution records. */
struct FeatureTableSubstitution
{
  /* Linear search for the substitute feature for feature_index;
   * nullptr when no record targets it. */
  const Feature *find_substitute (unsigned int feature_index) const
  {
    unsigned int count = substitutions.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
      if (record.featureIndex == feature_index)
        return &(this+record.feature);
    }
    return nullptr;
  }

  /* Collect lookups from substitute features whose target feature is
   * retained and not itself already being substituted. */
  void collect_lookups (const hb_set_t *feature_indexes,
                        const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
                        hb_set_t *lookup_indexes /* OUT */) const
  {
    + hb_iter (substitutions)
    | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
    | hb_filter ([feature_substitutes_map] (const FeatureTableSubstitutionRecord& record)
                 {
                   if (feature_substitutes_map == nullptr) return true;
                   return !feature_substitutes_map->has (record.featureIndex);
                 })
    | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
                { r.collect_lookups (this, lookup_indexes); })
    ;
  }

  void closure_features (const hb_map_t *lookup_indexes,
                         hb_set_t *feature_indexes /* OUT */) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
      record.closure_features (this, lookup_indexes, feature_indexes);
  }

  /* True if any record targets a feature retained by the subset plan. */
  bool intersects_features (const hb_map_t *feature_index_map) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
    {
      if (feature_index_map->has (record.featureIndex)) return true;
    }
    return false;
  }

  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
      record.collect_feature_substitutes_with_variations (c->feature_substitutes_map, c->feature_indices, this);
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    /* Records that return false from subset () are dropped. */
    + substitutions.iter ()
    | hb_apply (subset_record_array (l, &(out->substitutions), this))
    ;

    return_trace (bool (out->substitutions));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
                  likely (version.major == 1) &&
                  substitutions.sanitize (c, this));
  }

  protected:
  FixedVersion<>	version;	/* Version--0x00010000u */
  Array16Of<FeatureTableSubstitutionRecord>
			substitutions;
  public:
  DEFINE_SIZE_ARRAY (6, substitutions);
};
3319 | |
/* FeatureVariationRecord: pairs a ConditionSet with the substitutions
 * that take effect when it matches. */
struct FeatureVariationRecord
{
  friend struct FeatureVariations;

  void collect_lookups (const void *base,
                        const hb_set_t *feature_indexes,
                        const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
                        hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+substitutions).collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
  }

  void closure_features (const void *base,
                         const hb_map_t *lookup_indexes,
                         hb_set_t *feature_indexes /* OUT */) const
  {
    (base+substitutions).closure_features (lookup_indexes, feature_indexes);
  }

  bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
  {
    return (base+substitutions).intersects_features (feature_index_map);
  }

  /* Evaluate this record's conditions against the instancing plan; only
   * the first applying record contributes substitutions. */
  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
                                                    const void *base) const
  {
    (base+conditions).keep_with_variations (c);
    if (c->apply && !c->variation_applied)
    {
      (base+substitutions).collect_feature_substitutes_with_variations (c);
      c->variation_applied = true; // set variations only once
    }
  }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->conditions.serialize_subset (c->subset_context, conditions, base, c);
    out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, base) &&
                  substitutions.sanitize (c, base));
  }

  protected:
  Offset32To<ConditionSet>
			conditions;	/* When these conditions all hold... */
  Offset32To<FeatureTableSubstitution>
			substitutions;	/* ...apply these feature substitutions. */
  public:
  DEFINE_SIZE_STATIC (8);
};
3382 | |
/* FeatureVariations: top-level table mapping design-space regions
 * (condition sets) to feature-table substitutions. */
struct FeatureVariations
{
  static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;

  /* Find the first record whose conditions match the given coordinates.
   * Per spec, earlier records take precedence. */
  bool find_index (const int *coords, unsigned int coord_len,
                   unsigned int *index) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureVariationRecord &record = varRecords.arrayZ[i];
      if ((this+record.conditions).evaluate (coords, coord_len))
      {
        *index = i;
        return true;
      }
    }
    *index = NOT_FOUND_INDEX;
    return false;
  }

  const Feature *find_substitute (unsigned int variations_index,
                                  unsigned int feature_index) const
  {
    const FeatureVariationRecord &record = varRecords[variations_index];
    return (this+record.substitutions).find_substitute (feature_index);
  }

  /* Walk records in order, collecting substitutions that apply under the
   * instancing plan; stops at the first record that becomes universal
   * (unconditionally applying). */
  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      c->cur_record_idx = i;
      varRecords[i].collect_feature_substitutes_with_variations (c, this);
      if (c->universal)
        break;
    }
    /* Substitutions applied but some conditional records remain: a
     * catch-all record is needed so defaults still resolve. */
    if (c->variation_applied && !c->universal &&
        !c->record_cond_idx_map->is_empty ())
      c->insert_catch_all_feature_variation_record = true;
  }

  FeatureVariations* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  void collect_lookups (const hb_set_t *feature_indexes,
                        const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
                        hb_set_t *lookup_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& r : varRecords)
      r.collect_lookups (this, feature_indexes, feature_substitutes_map, lookup_indexes);
  }

  void closure_features (const hb_map_t *lookup_indexes,
                         const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
                         hb_set_t *feature_indexes /* OUT */) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      /* Skip records dropped by the instancing plan. */
      if (feature_record_cond_idx_map != nullptr &&
          !feature_record_cond_idx_map->has (i))
        continue;
      varRecords[i].closure_features (this, lookup_indexes, feature_indexes);
    }
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    /* Trim trailing records that touch no retained feature; earlier
     * non-intersecting records must stay to preserve match order. */
    int keep_up_to = -1;
    for (int i = varRecords.len - 1; i >= 0; i--) {
      if (varRecords[i].intersects_features (this, l->feature_index_map)) {
        keep_up_to = i;
        break;
      }
    }

    unsigned count = (unsigned) (keep_up_to + 1);
    for (unsigned i = 0; i < count; i++)
    {
      if (l->feature_record_cond_idx_map != nullptr &&
          !l->feature_record_cond_idx_map->has (i))
        continue;

      l->cur_feature_var_record_idx = i;
      subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
    }
    return_trace (bool (out->varRecords));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
                  likely (version.major == 1) &&
                  varRecords.sanitize (c, this));
  }

  protected:
  FixedVersion<>	version;	/* Version--0x00010000u */
  Array32Of<FeatureVariationRecord>
			varRecords;
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
};
3500 | |
3501 | |
3502 | /* |
3503 | * Device Tables |
3504 | */ |
3505 | |
/* HintingDevice: ppem-indexed delta table (Device table formats 1-3),
 * packing small signed per-size adjustments into an array of uint16s. */
struct HintingDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font) const
  { return get_delta (font->x_ppem, font->x_scale); }

  hb_position_t get_y_delta (hb_font_t *font) const
  { return get_delta (font->y_ppem, font->y_scale); }

  public:

  /* Total byte size: the three header words plus the packed delta words;
   * malformed headers are treated as header-only. */
  unsigned int get_size () const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
    return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
  }

  HintingDevice* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed<HintingDevice> (this));
  }

  private:

  /* Scale the pixel delta at this ppem into font units. */
  int get_delta (unsigned int ppem, int scale) const
  {
    if (!ppem) return 0;

    int pixels = get_delta_pixels (ppem);

    if (!pixels) return 0;

    return (int) (pixels * (int64_t) scale / ppem);
  }
  /* Extract the signed delta (in pixels) for ppem_size from the packed
   * array: format f stores (1<<f)-bit values, 16>>f of them per word. */
  int get_delta_pixels (unsigned int ppem_size) const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3))
      return 0;

    if (ppem_size < startSize || ppem_size > endSize)
      return 0;

    unsigned int s = ppem_size - startSize;

    unsigned int byte = deltaValueZ[s >> (4 - f)];
    unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
    unsigned int mask = (0xFFFFu >> (16 - (1 << f)));

    int delta = bits & mask;

    /* Sign-extend the (1<<f)-bit value. */
    if ((unsigned int) delta >= ((mask + 1) >> 1))
      delta -= mask + 1;

    return delta;
  }

  protected:
  HBUINT16	startSize;		/* Smallest size to correct--in ppem */
  HBUINT16	endSize;		/* Largest size to correct--in ppem */
  HBUINT16	deltaFormat;		/* Format of DeltaValue array data: 1, 2, or 3
					 * 1	Signed 2-bit value, 8 values per uint16
					 * 2	Signed 4-bit value, 4 values per uint16
					 * 3	Signed 8-bit value, 2 values per uint16
					 */
  UnsizedArrayOf<HBUINT16>
		deltaValueZ;		/* Array of compressed data */
  public:
  DEFINE_SIZE_ARRAY (6, deltaValueZ);
};
3587 | |
/* VariationDevice: a VariationIndex Device table — the delta comes from
 * the font's VariationStore at the stored variation index. */
struct VariationDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font,
                             const VariationStore &store,
                             VariationStore::cache_t *store_cache = nullptr) const
  { return font->em_scalef_x (get_delta (font, store, store_cache)); }

  hb_position_t get_y_delta (hb_font_t *font,
                             const VariationStore &store,
                             VariationStore::cache_t *store_cache = nullptr) const
  { return font->em_scalef_y (get_delta (font, store, store_cache)); }

  /* Copy for subsetting, remapping varIdx through the plan's
   * variation-index map; nullptr if the index was not retained. */
  VariationDevice* copy (hb_serialize_context_t *c,
                         const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
  {
    TRACE_SERIALIZE (this);
    if (!layout_variation_idx_delta_map) return_trace (nullptr);

    hb_pair_t<unsigned, int> *v;
    if (!layout_variation_idx_delta_map->has (varIdx, &v))
      return_trace (nullptr);

    c->start_zerocopy (this->static_size);
    auto *out = c->embed (this);
    if (unlikely (!out)) return_trace (nullptr);

    unsigned new_idx = hb_first (*v);
    out->varIdx = new_idx;
    return_trace (out);
  }

  /* Record varIdx for remapping, along with the delta it resolves to at
   * the plan's pinned coordinates (0 when none are given). */
  void collect_variation_index (hb_collect_variation_indices_context_t *c) const
  {
    c->layout_variation_indices->add (varIdx);
    int delta = 0;
    if (c->normalized_coords && c->var_store)
      delta = roundf (c->var_store->get_delta (varIdx, c->normalized_coords->arrayZ,
                                               c->normalized_coords->length, c->store_cache));

    /* set new varidx to HB_OT_LAYOUT_NO_VARIATIONS_INDEX here, will remap
     * varidx later*/
    c->varidx_delta_map->set (varIdx, hb_pair_t<unsigned, int> (HB_OT_LAYOUT_NO_VARIATIONS_INDEX, delta));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  private:

  float get_delta (hb_font_t *font,
                   const VariationStore &store,
                   VariationStore::cache_t *store_cache = nullptr) const
  {
    return store.get_delta (varIdx, font->coords, font->num_coords, (VariationStore::cache_t *) store_cache);
  }

  protected:
  VarIdx	varIdx;		/* Packed outer/inner index into the VariationStore. */
  HBUINT16	deltaFormat;	/* Format identifier for this table: 0x8000 */
  public:
  DEFINE_SIZE_STATIC (6);
};
3657 | |
3658 | struct |
3659 | { |
3660 | protected: |
3661 | HBUINT16 ; |
3662 | HBUINT16 ; |
3663 | public: |
3664 | HBUINT16 ; /* Format identifier */ |
3665 | public: |
3666 | DEFINE_SIZE_STATIC (6); |
3667 | }; |
3668 | |
3669 | struct Device |
3670 | { |
3671 | hb_position_t get_x_delta (hb_font_t *font, |
3672 | const VariationStore &store=Null (VariationStore), |
3673 | VariationStore::cache_t *store_cache = nullptr) const |
3674 | { |
3675 | switch (u.b.format) |
3676 | { |
3677 | #ifndef HB_NO_HINTING |
3678 | case 1: case 2: case 3: |
3679 | return u.hinting.get_x_delta (font); |
3680 | #endif |
3681 | #ifndef HB_NO_VAR |
3682 | case 0x8000: |
3683 | return u.variation.get_x_delta (font, store, store_cache); |
3684 | #endif |
3685 | default: |
3686 | return 0; |
3687 | } |
3688 | } |
3689 | hb_position_t get_y_delta (hb_font_t *font, |
3690 | const VariationStore &store=Null (VariationStore), |
3691 | VariationStore::cache_t *store_cache = nullptr) const |
3692 | { |
3693 | switch (u.b.format) |
3694 | { |
3695 | case 1: case 2: case 3: |
3696 | #ifndef HB_NO_HINTING |
3697 | return u.hinting.get_y_delta (font); |
3698 | #endif |
3699 | #ifndef HB_NO_VAR |
3700 | case 0x8000: |
3701 | return u.variation.get_y_delta (font, store, store_cache); |
3702 | #endif |
3703 | default: |
3704 | return 0; |
3705 | } |
3706 | } |
3707 | |
3708 | bool sanitize (hb_sanitize_context_t *c) const |
3709 | { |
3710 | TRACE_SANITIZE (this); |
3711 | if (!u.b.format.sanitize (c)) return_trace (false); |
3712 | switch (u.b.format) { |
3713 | #ifndef HB_NO_HINTING |
3714 | case 1: case 2: case 3: |
3715 | return_trace (u.hinting.sanitize (c)); |
3716 | #endif |
3717 | #ifndef HB_NO_VAR |
3718 | case 0x8000: |
3719 | return_trace (u.variation.sanitize (c)); |
3720 | #endif |
3721 | default: |
3722 | return_trace (true); |
3723 | } |
3724 | } |
3725 | |
3726 | Device* copy (hb_serialize_context_t *c, |
3727 | const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map=nullptr) const |
3728 | { |
3729 | TRACE_SERIALIZE (this); |
3730 | switch (u.b.format) { |
3731 | #ifndef HB_NO_HINTING |
3732 | case 1: |
3733 | case 2: |
3734 | case 3: |
3735 | return_trace (reinterpret_cast<Device *> (u.hinting.copy (c))); |
3736 | #endif |
3737 | #ifndef HB_NO_VAR |
3738 | case 0x8000: |
3739 | return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_delta_map))); |
3740 | #endif |
3741 | default: |
3742 | return_trace (nullptr); |
3743 | } |
3744 | } |
3745 | |
3746 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const |
3747 | { |
3748 | switch (u.b.format) { |
3749 | #ifndef HB_NO_HINTING |
3750 | case 1: |
3751 | case 2: |
3752 | case 3: |
3753 | return; |
3754 | #endif |
3755 | #ifndef HB_NO_VAR |
3756 | case 0x8000: |
3757 | u.variation.collect_variation_index (c); |
3758 | return; |
3759 | #endif |
3760 | default: |
3761 | return; |
3762 | } |
3763 | } |
3764 | |
3765 | unsigned get_variation_index () const |
3766 | { |
3767 | switch (u.b.format) { |
3768 | #ifndef HB_NO_VAR |
3769 | case 0x8000: |
3770 | return u.variation.varIdx; |
3771 | #endif |
3772 | default: |
3773 | return HB_OT_LAYOUT_NO_VARIATIONS_INDEX; |
3774 | } |
3775 | } |
3776 | |
3777 | protected: |
3778 | union { |
3779 | DeviceHeader b; |
3780 | HintingDevice hinting; |
3781 | #ifndef HB_NO_VAR |
3782 | VariationDevice variation; |
3783 | #endif |
3784 | } u; |
3785 | public: |
3786 | DEFINE_SIZE_UNION (6, b); |
3787 | }; |
3788 | |
3789 | |
3790 | } /* namespace OT */ |
3791 | |
3792 | |
3793 | #endif /* HB_OT_LAYOUT_COMMON_HH */ |
3794 | |