1 | /* |
2 | * Copyright © 2017 Google, Inc. |
3 | * |
4 | * This is part of HarfBuzz, a text shaping library. |
5 | * |
6 | * Permission is hereby granted, without written agreement and without |
7 | * license or royalty fees, to use, copy, modify, and distribute this |
8 | * software and its documentation for any purpose, provided that the |
9 | * above copyright notice and the following two paragraphs appear in |
10 | * all copies of this software. |
11 | * |
12 | * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
13 | * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
14 | * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
15 | * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
16 | * DAMAGE. |
17 | * |
18 | * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
19 | * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
20 | * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
21 | * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
22 | * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
23 | * |
24 | * Google Author(s): Behdad Esfahbod |
25 | */ |
26 | |
27 | #ifndef HB_AAT_LAYOUT_COMMON_HH |
28 | #define HB_AAT_LAYOUT_COMMON_HH |
29 | |
30 | #include "hb-aat-layout.hh" |
31 | #include "hb-open-type.hh" |
32 | |
33 | |
34 | namespace AAT { |
35 | |
36 | using namespace OT; |
37 | |
38 | |
39 | /* |
40 | * Lookup Table |
41 | */ |
42 | |
43 | template <typename T> struct Lookup; |
44 | |
45 | template <typename T> |
46 | struct LookupFormat0 |
47 | { |
48 | friend struct Lookup<T>; |
49 | |
50 | private: |
51 | const T* get_value (hb_codepoint_t glyph_id, unsigned int num_glyphs) const |
52 | { |
53 | if (unlikely (glyph_id >= num_glyphs)) return nullptr; |
54 | return &arrayZ[glyph_id]; |
55 | } |
56 | |
57 | bool sanitize (hb_sanitize_context_t *c) const |
58 | { |
59 | TRACE_SANITIZE (this); |
60 | return_trace (arrayZ.sanitize (c, c->get_num_glyphs ())); |
61 | } |
62 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
63 | { |
64 | TRACE_SANITIZE (this); |
65 | return_trace (arrayZ.sanitize (c, c->get_num_glyphs (), base)); |
66 | } |
67 | |
68 | protected: |
69 | HBUINT16 format; /* Format identifier--format = 0 */ |
70 | UnsizedArrayOf<T> |
71 | arrayZ; /* Array of lookup values, indexed by glyph index. */ |
72 | public: |
73 | DEFINE_SIZE_UNBOUNDED (2); |
74 | }; |
75 | |
76 | |
77 | template <typename T> |
78 | struct LookupSegmentSingle |
79 | { |
80 | static constexpr unsigned TerminationWordCount = 2u; |
81 | |
82 | int cmp (hb_codepoint_t g) const |
83 | { return g < first ? -1 : g <= last ? 0 : +1 ; } |
84 | |
85 | bool sanitize (hb_sanitize_context_t *c) const |
86 | { |
87 | TRACE_SANITIZE (this); |
88 | return_trace (c->check_struct (this) && value.sanitize (c)); |
89 | } |
90 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
91 | { |
92 | TRACE_SANITIZE (this); |
93 | return_trace (c->check_struct (this) && value.sanitize (c, base)); |
94 | } |
95 | |
96 | GlyphID last; /* Last GlyphID in this segment */ |
97 | GlyphID first; /* First GlyphID in this segment */ |
98 | T value; /* The lookup value (only one) */ |
99 | public: |
100 | DEFINE_SIZE_STATIC (4 + T::static_size); |
101 | }; |
102 | |
103 | template <typename T> |
104 | struct LookupFormat2 |
105 | { |
106 | friend struct Lookup<T>; |
107 | |
108 | private: |
109 | const T* get_value (hb_codepoint_t glyph_id) const |
110 | { |
111 | const LookupSegmentSingle<T> *v = segments.bsearch (glyph_id); |
112 | return v ? &v->value : nullptr; |
113 | } |
114 | |
115 | bool sanitize (hb_sanitize_context_t *c) const |
116 | { |
117 | TRACE_SANITIZE (this); |
118 | return_trace (segments.sanitize (c)); |
119 | } |
120 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
121 | { |
122 | TRACE_SANITIZE (this); |
123 | return_trace (segments.sanitize (c, base)); |
124 | } |
125 | |
126 | protected: |
127 | HBUINT16 format; /* Format identifier--format = 2 */ |
128 | VarSizedBinSearchArrayOf<LookupSegmentSingle<T> > |
129 | segments; /* The actual segments. These must already be sorted, |
130 | * according to the first word in each one (the last |
131 | * glyph in each segment). */ |
132 | public: |
133 | DEFINE_SIZE_ARRAY (8, segments); |
134 | }; |
135 | |
136 | template <typename T> |
137 | struct LookupSegmentArray |
138 | { |
139 | static constexpr unsigned TerminationWordCount = 2u; |
140 | |
141 | const T* get_value (hb_codepoint_t glyph_id, const void *base) const |
142 | { |
143 | return first <= glyph_id && glyph_id <= last ? &(base+valuesZ)[glyph_id - first] : nullptr; |
144 | } |
145 | |
146 | int cmp (hb_codepoint_t g) const |
147 | { return g < first ? -1 : g <= last ? 0 : +1; } |
148 | |
149 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
150 | { |
151 | TRACE_SANITIZE (this); |
152 | return_trace (c->check_struct (this) && |
153 | first <= last && |
154 | valuesZ.sanitize (c, base, last - first + 1)); |
155 | } |
156 | template <typename T2> |
157 | bool sanitize (hb_sanitize_context_t *c, const void *base, T2 user_data) const |
158 | { |
159 | TRACE_SANITIZE (this); |
160 | return_trace (c->check_struct (this) && |
161 | first <= last && |
162 | valuesZ.sanitize (c, base, last - first + 1, user_data)); |
163 | } |
164 | |
165 | GlyphID last; /* Last GlyphID in this segment */ |
166 | GlyphID first; /* First GlyphID in this segment */ |
167 | NNOffsetTo<UnsizedArrayOf<T> > |
168 | valuesZ; /* A 16-bit offset from the start of |
169 | * the table to the data. */ |
170 | public: |
171 | DEFINE_SIZE_STATIC (6); |
172 | }; |
173 | |
174 | template <typename T> |
175 | struct LookupFormat4 |
176 | { |
177 | friend struct Lookup<T>; |
178 | |
179 | private: |
180 | const T* get_value (hb_codepoint_t glyph_id) const |
181 | { |
182 | const LookupSegmentArray<T> *v = segments.bsearch (glyph_id); |
183 | return v ? v->get_value (glyph_id, this) : nullptr; |
184 | } |
185 | |
186 | bool sanitize (hb_sanitize_context_t *c) const |
187 | { |
188 | TRACE_SANITIZE (this); |
189 | return_trace (segments.sanitize (c, this)); |
190 | } |
191 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
192 | { |
193 | TRACE_SANITIZE (this); |
194 | return_trace (segments.sanitize (c, this, base)); |
195 | } |
196 | |
197 | protected: |
198 | HBUINT16 format; /* Format identifier--format = 4 */ |
199 | VarSizedBinSearchArrayOf<LookupSegmentArray<T> > |
200 | segments; /* The actual segments. These must already be sorted, |
201 | * according to the first word in each one (the last |
202 | * glyph in each segment). */ |
203 | public: |
204 | DEFINE_SIZE_ARRAY (8, segments); |
205 | }; |
206 | |
207 | template <typename T> |
208 | struct LookupSingle |
209 | { |
210 | static constexpr unsigned TerminationWordCount = 1u; |
211 | |
212 | int cmp (hb_codepoint_t g) const { return glyph.cmp (g); } |
213 | |
214 | bool sanitize (hb_sanitize_context_t *c) const |
215 | { |
216 | TRACE_SANITIZE (this); |
217 | return_trace (c->check_struct (this) && value.sanitize (c)); |
218 | } |
219 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
220 | { |
221 | TRACE_SANITIZE (this); |
222 | return_trace (c->check_struct (this) && value.sanitize (c, base)); |
223 | } |
224 | |
225 | GlyphID glyph; /* Last GlyphID */ |
226 | T value; /* The lookup value (only one) */ |
227 | public: |
228 | DEFINE_SIZE_STATIC (2 + T::static_size); |
229 | }; |
230 | |
231 | template <typename T> |
232 | struct LookupFormat6 |
233 | { |
234 | friend struct Lookup<T>; |
235 | |
236 | private: |
237 | const T* get_value (hb_codepoint_t glyph_id) const |
238 | { |
239 | const LookupSingle<T> *v = entries.bsearch (glyph_id); |
240 | return v ? &v->value : nullptr; |
241 | } |
242 | |
243 | bool sanitize (hb_sanitize_context_t *c) const |
244 | { |
245 | TRACE_SANITIZE (this); |
246 | return_trace (entries.sanitize (c)); |
247 | } |
248 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
249 | { |
250 | TRACE_SANITIZE (this); |
251 | return_trace (entries.sanitize (c, base)); |
252 | } |
253 | |
254 | protected: |
255 | HBUINT16 format; /* Format identifier--format = 6 */ |
256 | VarSizedBinSearchArrayOf<LookupSingle<T> > |
257 | entries; /* The actual entries, sorted by glyph index. */ |
258 | public: |
259 | DEFINE_SIZE_ARRAY (8, entries); |
260 | }; |
261 | |
262 | template <typename T> |
263 | struct LookupFormat8 |
264 | { |
265 | friend struct Lookup<T>; |
266 | |
267 | private: |
268 | const T* get_value (hb_codepoint_t glyph_id) const |
269 | { |
270 | return firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount ? |
271 | &valueArrayZ[glyph_id - firstGlyph] : nullptr; |
272 | } |
273 | |
274 | bool sanitize (hb_sanitize_context_t *c) const |
275 | { |
276 | TRACE_SANITIZE (this); |
277 | return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount)); |
278 | } |
279 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
280 | { |
281 | TRACE_SANITIZE (this); |
282 | return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount, base)); |
283 | } |
284 | |
285 | protected: |
286 | HBUINT16 format; /* Format identifier--format = 8 */ |
287 | GlyphID firstGlyph; /* First glyph index included in the trimmed array. */ |
288 | HBUINT16 glyphCount; /* Total number of glyphs (equivalent to the last |
289 | * glyph minus the value of firstGlyph plus 1). */ |
290 | UnsizedArrayOf<T> |
291 | valueArrayZ; /* The lookup values (indexed by the glyph index |
292 | * minus the value of firstGlyph). */ |
293 | public: |
294 | DEFINE_SIZE_ARRAY (6, valueArrayZ); |
295 | }; |
296 | |
/* Format 10: like format 8, a trimmed array — but values are stored as
 * raw big-endian byte strings of `valueSize` bytes each, so a value is
 * returned rather than a pointer into the table. */
template <typename T>
struct LookupFormat10
{
  friend struct Lookup<T>;

  private:
  const typename T::type get_value_or_null (hb_codepoint_t glyph_id) const
  {
    if (!(firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount))
      return Null(T);

    const HBUINT8 *p = &valueArrayZ[(glyph_id - firstGlyph) * valueSize];

    /* Assemble the big-endian value byte by byte; valueSize is capped
     * at 4 by sanitize(), so this fits an unsigned int. */
    unsigned int v = 0;
    unsigned int count = valueSize;
    for (unsigned int i = 0; i < count; i++)
      v = (v << 8) | *p++;

    return v;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  valueSize <= 4 &&
		  valueArrayZ.sanitize (c, glyphCount * valueSize));
  }

  protected:
  HBUINT16	format;		/* Format identifier--format = 10 */
  HBUINT16	valueSize;	/* Byte size of each value. */
  GlyphID	firstGlyph;	/* First glyph index included in the trimmed array. */
  HBUINT16	glyphCount;	/* Total number of glyphs (equivalent to the last
				 * glyph minus the value of firstGlyph plus 1). */
  UnsizedArrayOf<HBUINT8>
		valueArrayZ;	/* The lookup values (indexed by the glyph index
				 * minus the value of firstGlyph). */
  public:
  DEFINE_SIZE_ARRAY (8, valueArrayZ);
};
338 | |
/* Generic AAT lookup table: a tagged union over formats 0/2/4/6/8/10,
 * dispatching on the leading 16-bit format field. */
template <typename T>
struct Lookup
{
  /* Returns a pointer to the stored value for glyph_id, or nullptr if
   * the glyph is not covered (or the format is unknown / is 10, which
   * cannot hand out pointers). */
  const T* get_value (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
  {
    switch (u.format) {
    case 0: return u.format0.get_value (glyph_id, num_glyphs);
    case 2: return u.format2.get_value (glyph_id);
    case 4: return u.format4.get_value (glyph_id);
    case 6: return u.format6.get_value (glyph_id);
    case 8: return u.format8.get_value (glyph_id);
    default:return nullptr;
    }
  }

  /* Returns the value itself; Null(T) when the glyph is not covered. */
  const typename T::type get_value_or_null (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
  {
    switch (u.format) {
      /* Format 10 cannot return a pointer. */
      case 10: return u.format10.get_value_or_null (glyph_id);
      default:
      const T *v = get_value (glyph_id, num_glyphs);
      return v ? *v : Null(T);
    }
  }

  /* Returns the value interpreted as a class, substituting outOfRange
   * for uncovered glyphs. */
  typename T::type get_class (hb_codepoint_t glyph_id,
			      unsigned int num_glyphs,
			      unsigned int outOfRange) const
  {
    const T *v = get_value (glyph_id, num_glyphs);
    return v ? *v : outOfRange;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Validate the format tag first; only then trust the union member. */
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 0: return_trace (u.format0.sanitize (c));
    case 2: return_trace (u.format2.sanitize (c));
    case 4: return_trace (u.format4.sanitize (c));
    case 6: return_trace (u.format6.sanitize (c));
    case 8: return_trace (u.format8.sanitize (c));
    case 10: return_trace (u.format10.sanitize (c));
    default:return_trace (true);
    }
  }
  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 0: return_trace (u.format0.sanitize (c, base));
    case 2: return_trace (u.format2.sanitize (c, base));
    case 4: return_trace (u.format4.sanitize (c, base));
    case 6: return_trace (u.format6.sanitize (c, base));
    case 8: return_trace (u.format8.sanitize (c, base));
    case 10: return_trace (false); /* We don't support format10 here currently. */
    default:return_trace (true);
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  LookupFormat0<T>	format0;
  LookupFormat2<T>	format2;
  LookupFormat4<T>	format4;
  LookupFormat6<T>	format6;
  LookupFormat8<T>	format8;
  LookupFormat10<T>	format10;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
415 | /* Lookup 0 has unbounded size (dependant on num_glyphs). So we need to defined |
416 | * special NULL objects for Lookup<> objects, but since it's template our macros |
417 | * don't work. So we have to hand-code them here. UGLY. */ |
418 | } /* Close namespace. */ |
419 | /* Ugly hand-coded null objects for template Lookup<> :(. */ |
/* Two zero bytes are enough to back any null Lookup<>: only the leading
 * 16-bit format field is read before dispatch (DEFINE_SIZE_UNION (2)). */
extern HB_INTERNAL const unsigned char _hb_Null_AAT_Lookup[2];
template <>
/*static*/ inline const AAT::Lookup<OT::HBUINT16>& Null<AAT::Lookup<OT::HBUINT16> > ()
{ return *reinterpret_cast<const AAT::Lookup<OT::HBUINT16> *> (_hb_Null_AAT_Lookup); }
template <>
/*static*/ inline const AAT::Lookup<OT::HBUINT32>& Null<AAT::Lookup<OT::HBUINT32> > ()
{ return *reinterpret_cast<const AAT::Lookup<OT::HBUINT32> *> (_hb_Null_AAT_Lookup); }
template <>
/*static*/ inline const AAT::Lookup<OT::Offset<OT::HBUINT16, false> >& Null<AAT::Lookup<OT::Offset<OT::HBUINT16, false> > > ()
{ return *reinterpret_cast<const AAT::Lookup<OT::Offset<OT::HBUINT16, false> > *> (_hb_Null_AAT_Lookup); }
430 | namespace AAT { |
431 | |
/* Glyph id used to mark glyphs deleted during AAT processing; state
 * machines map it to CLASS_DELETED_GLYPH (see StateTable::get_class). */
enum { DELETED_GLYPH = 0xFFFF };
433 | |
434 | /* |
435 | * (Extended) State Table |
436 | */ |
437 | |
/* One transition entry of a state-machine row: target state, flags, and
 * table-specific payload T. */
template <typename T>
struct Entry
{
  bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
  {
    TRACE_SANITIZE (this);
    /* Note, we don't recurse-sanitize data because we don't access it.
     * That said, in our DEFINE_SIZE_STATIC we access T::static_size,
     * which ensures that data has a simple sanitize(). To be determined
     * if I need to remove that as well.
     *
     * HOWEVER! Because we are a template, our DEFINE_SIZE_STATIC
     * assertion wouldn't be checked, hence the line below. */
    static_assert (T::static_size, "" );

    return_trace (c->check_struct (this));
  }

  public:
  HBUINT16	newState;	/* Byte offset from beginning of state table
				 * to the new state. Really?!?! Or just state
				 * number? The latter in morx for sure. */
  HBUINT16	flags;		/* Table specific. */
  T		data;		/* Optional offsets to per-glyph tables. */
  public:
  DEFINE_SIZE_STATIC (4 + T::static_size);
};
465 | |
/* Specialization for entries that carry no payload: just the new state
 * and the flags word. */
template <>
struct Entry<void>
{
  /* `count` kept for signature parity with Entry<T>::sanitize. */
  bool sanitize (hb_sanitize_context_t *c, unsigned int count /*XXX Unused?*/) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  public:
  HBUINT16	newState;	/* Byte offset from beginning of state table to the new state. */
  HBUINT16	flags;		/* Table specific. */
  public:
  DEFINE_SIZE_STATIC (4);
};
481 | |
482 | template <typename Types, typename Extra> |
483 | struct StateTable |
484 | { |
485 | typedef typename Types::HBUINT HBUINT; |
486 | typedef typename Types::HBUSHORT HBUSHORT; |
487 | typedef typename Types::ClassTypeNarrow ClassType; |
488 | |
489 | enum State |
490 | { |
491 | STATE_START_OF_TEXT = 0, |
492 | STATE_START_OF_LINE = 1, |
493 | }; |
494 | enum Class |
495 | { |
496 | CLASS_END_OF_TEXT = 0, |
497 | CLASS_OUT_OF_BOUNDS = 1, |
498 | CLASS_DELETED_GLYPH = 2, |
499 | CLASS_END_OF_LINE = 3, |
500 | }; |
501 | |
502 | int new_state (unsigned int newState) const |
503 | { return Types::extended ? newState : ((int) newState - (int) stateArrayTable) / (int) nClasses; } |
504 | |
505 | unsigned int get_class (hb_codepoint_t glyph_id, unsigned int num_glyphs) const |
506 | { |
507 | if (unlikely (glyph_id == DELETED_GLYPH)) return CLASS_DELETED_GLYPH; |
508 | return (this+classTable).get_class (glyph_id, num_glyphs, 1); |
509 | } |
510 | |
511 | const Entry<Extra> *get_entries () const |
512 | { return (this+entryTable).arrayZ; } |
513 | |
514 | const Entry<Extra> &get_entry (int state, unsigned int klass) const |
515 | { |
516 | if (unlikely (klass >= nClasses)) |
517 | klass = StateTable<Types, Entry<Extra> >::CLASS_OUT_OF_BOUNDS; |
518 | |
519 | const HBUSHORT *states = (this+stateArrayTable).arrayZ; |
520 | const Entry<Extra> *entries = (this+entryTable).arrayZ; |
521 | |
522 | unsigned int entry = states[state * nClasses + klass]; |
523 | DEBUG_MSG (APPLY, nullptr, "e%u" , entry); |
524 | |
525 | return entries[entry]; |
526 | } |
527 | |
528 | bool sanitize (hb_sanitize_context_t *c, |
529 | unsigned int *num_entries_out = nullptr) const |
530 | { |
531 | TRACE_SANITIZE (this); |
532 | if (unlikely (!(c->check_struct (this) && |
533 | nClasses >= 4 /* Ensure pre-defined classes fit. */ && |
534 | classTable.sanitize (c, this)))) return_trace (false); |
535 | |
536 | const HBUSHORT *states = (this+stateArrayTable).arrayZ; |
537 | const Entry<Extra> *entries = (this+entryTable).arrayZ; |
538 | |
539 | unsigned int num_classes = nClasses; |
540 | if (unlikely (hb_unsigned_mul_overflows (num_classes, states[0].static_size))) |
541 | return_trace (false); |
542 | unsigned int row_stride = num_classes * states[0].static_size; |
543 | |
544 | /* Apple 'kern' table has this peculiarity: |
545 | * |
546 | * "Because the stateTableOffset in the state table header is (strictly |
547 | * speaking) redundant, some 'kern' tables use it to record an initial |
548 | * state where that should not be StartOfText. To determine if this is |
549 | * done, calculate what the stateTableOffset should be. If it's different |
550 | * from the actual stateTableOffset, use it as the initial state." |
551 | * |
552 | * We implement this by calling the initial state zero, but allow *negative* |
553 | * states if the start state indeed was not the first state. Since the code |
554 | * is shared, this will also apply to 'mort' table. The 'kerx' / 'morx' |
555 | * tables are not affected since those address states by index, not offset. |
556 | */ |
557 | |
558 | int min_state = 0; |
559 | int max_state = 0; |
560 | unsigned int num_entries = 0; |
561 | |
562 | int state_pos = 0; |
563 | int state_neg = 0; |
564 | unsigned int entry = 0; |
565 | while (min_state < state_neg || state_pos <= max_state) |
566 | { |
567 | if (min_state < state_neg) |
568 | { |
569 | /* Negative states. */ |
570 | if (unlikely (hb_unsigned_mul_overflows (min_state, num_classes))) |
571 | return_trace (false); |
572 | if (unlikely (!c->check_range (&states[min_state * num_classes], |
573 | -min_state, |
574 | row_stride))) |
575 | return_trace (false); |
576 | if ((c->max_ops -= state_neg - min_state) <= 0) |
577 | return_trace (false); |
578 | { /* Sweep new states. */ |
579 | const HBUSHORT *stop = &states[min_state * num_classes]; |
580 | if (unlikely (stop > states)) |
581 | return_trace (false); |
582 | for (const HBUSHORT *p = states; stop < p; p--) |
583 | num_entries = MAX<unsigned int> (num_entries, *(p - 1) + 1); |
584 | state_neg = min_state; |
585 | } |
586 | } |
587 | |
588 | if (state_pos <= max_state) |
589 | { |
590 | /* Positive states. */ |
591 | if (unlikely (!c->check_range (states, |
592 | max_state + 1, |
593 | row_stride))) |
594 | return_trace (false); |
595 | if ((c->max_ops -= max_state - state_pos + 1) <= 0) |
596 | return_trace (false); |
597 | { /* Sweep new states. */ |
598 | if (unlikely (hb_unsigned_mul_overflows ((max_state + 1), num_classes))) |
599 | return_trace (false); |
600 | const HBUSHORT *stop = &states[(max_state + 1) * num_classes]; |
601 | if (unlikely (stop < states)) |
602 | return_trace (false); |
603 | for (const HBUSHORT *p = &states[state_pos * num_classes]; p < stop; p++) |
604 | num_entries = MAX<unsigned int> (num_entries, *p + 1); |
605 | state_pos = max_state + 1; |
606 | } |
607 | } |
608 | |
609 | if (unlikely (!c->check_array (entries, num_entries))) |
610 | return_trace (false); |
611 | if ((c->max_ops -= num_entries - entry) <= 0) |
612 | return_trace (false); |
613 | { /* Sweep new entries. */ |
614 | const Entry<Extra> *stop = &entries[num_entries]; |
615 | for (const Entry<Extra> *p = &entries[entry]; p < stop; p++) |
616 | { |
617 | int newState = new_state (p->newState); |
618 | min_state = MIN (min_state, newState); |
619 | max_state = MAX (max_state, newState); |
620 | } |
621 | entry = num_entries; |
622 | } |
623 | } |
624 | |
625 | if (num_entries_out) |
626 | *num_entries_out = num_entries; |
627 | |
628 | return_trace (true); |
629 | } |
630 | |
631 | protected: |
632 | HBUINT nClasses; /* Number of classes, which is the number of indices |
633 | * in a single line in the state array. */ |
634 | NNOffsetTo<ClassType, HBUINT> |
635 | classTable; /* Offset to the class table. */ |
636 | NNOffsetTo<UnsizedArrayOf<HBUSHORT>, HBUINT> |
637 | stateArrayTable;/* Offset to the state array. */ |
638 | NNOffsetTo<UnsizedArrayOf<Entry<Extra> >, HBUINT> |
639 | entryTable; /* Offset to the entry array. */ |
640 | |
641 | public: |
642 | DEFINE_SIZE_STATIC (4 * sizeof (HBUINT)); |
643 | }; |
644 | |
645 | template <typename HBUCHAR> |
646 | struct ClassTable |
647 | { |
648 | unsigned int get_class (hb_codepoint_t glyph_id, unsigned int outOfRange) const |
649 | { |
650 | unsigned int i = glyph_id - firstGlyph; |
651 | return i >= classArray.len ? outOfRange : classArray.arrayZ[i]; |
652 | } |
653 | unsigned int get_class (hb_codepoint_t glyph_id, |
654 | unsigned int num_glyphs HB_UNUSED, |
655 | unsigned int outOfRange) const |
656 | { |
657 | return get_class (glyph_id, outOfRange); |
658 | } |
659 | bool sanitize (hb_sanitize_context_t *c) const |
660 | { |
661 | TRACE_SANITIZE (this); |
662 | return_trace (c->check_struct (this) && classArray.sanitize (c)); |
663 | } |
664 | protected: |
665 | GlyphID firstGlyph; /* First glyph index included in the trimmed array. */ |
666 | ArrayOf<HBUCHAR> classArray; /* The class codes (indexed by glyph index minus |
667 | * firstGlyph). */ |
668 | public: |
669 | DEFINE_SIZE_ARRAY (4, classArray); |
670 | }; |
671 | |
672 | struct ObsoleteTypes |
673 | { |
674 | static constexpr bool extended = false; |
675 | typedef HBUINT16 HBUINT; |
676 | typedef HBUINT8 HBUSHORT; |
677 | typedef ClassTable<HBUINT8> ClassTypeNarrow; |
678 | typedef ClassTable<HBUINT16> ClassTypeWide; |
679 | |
680 | template <typename T> |
681 | static unsigned int offsetToIndex (unsigned int offset, |
682 | const void *base, |
683 | const T *array) |
684 | { |
685 | return (offset - ((const char *) array - (const char *) base)) / sizeof (T); |
686 | } |
687 | template <typename T> |
688 | static unsigned int byteOffsetToIndex (unsigned int offset, |
689 | const void *base, |
690 | const T *array) |
691 | { |
692 | return offsetToIndex (offset, base, array); |
693 | } |
694 | template <typename T> |
695 | static unsigned int wordOffsetToIndex (unsigned int offset, |
696 | const void *base, |
697 | const T *array) |
698 | { |
699 | return offsetToIndex (2 * offset, base, array); |
700 | } |
701 | }; |
702 | struct ExtendedTypes |
703 | { |
704 | static constexpr bool extended = true; |
705 | typedef HBUINT32 HBUINT; |
706 | typedef HBUINT16 HBUSHORT; |
707 | typedef Lookup<HBUINT16> ClassTypeNarrow; |
708 | typedef Lookup<HBUINT16> ClassTypeWide; |
709 | |
710 | template <typename T> |
711 | static unsigned int offsetToIndex (unsigned int offset, |
712 | const void *base HB_UNUSED, |
713 | const T *array HB_UNUSED) |
714 | { |
715 | return offset; |
716 | } |
717 | template <typename T> |
718 | static unsigned int byteOffsetToIndex (unsigned int offset, |
719 | const void *base HB_UNUSED, |
720 | const T *array HB_UNUSED) |
721 | { |
722 | return offset / 2; |
723 | } |
724 | template <typename T> |
725 | static unsigned int wordOffsetToIndex (unsigned int offset, |
726 | const void *base HB_UNUSED, |
727 | const T *array HB_UNUSED) |
728 | { |
729 | return offset; |
730 | } |
731 | }; |
732 | |
/* Drives a StateTable over a buffer: per glyph, looks up the class,
 * fetches the (state, class) entry, lets the context act on it, then
 * transitions.  The context supplies is_actionable() / transition() and
 * the DontAdvance flag semantics. */
template <typename Types, typename EntryData>
struct StateTableDriver
{
  StateTableDriver (const StateTable<Types, EntryData> &machine_,
		    hb_buffer_t *buffer_,
		    hb_face_t *face_) :
	      machine (machine_),
	      buffer (buffer_),
	      num_glyphs (face_->get_num_glyphs ()) {}

  template <typename context_t>
  void drive (context_t *c)
  {
    if (!c->in_place)
      buffer->clear_output ();

    int state = StateTable<Types, EntryData>::STATE_START_OF_TEXT;
    /* Runs one extra iteration past the last glyph with CLASS_END_OF_TEXT;
     * the loop body breaks out after processing it. */
    for (buffer->idx = 0; buffer->successful;)
    {
      unsigned int klass = buffer->idx < buffer->len ?
			   machine.get_class (buffer->info[buffer->idx].codepoint, num_glyphs) :
			   (unsigned) StateTable<Types, EntryData>::CLASS_END_OF_TEXT;
      DEBUG_MSG (APPLY, nullptr, "c%u at %u" , klass, buffer->idx);
      const Entry<EntryData> &entry = machine.get_entry (state, klass);

      /* Unsafe-to-break before this if not in state 0, as things might
       * go differently if we start from state 0 here.
       *
       * Ugh. The indexing here is ugly... */
      if (state && buffer->backtrack_len () && buffer->idx < buffer->len)
      {
	/* If there's no action and we're just epsilon-transitioning to state 0,
	 * safe to break. */
	if (c->is_actionable (this, entry) ||
	    !(entry.newState == StateTable<Types, EntryData>::STATE_START_OF_TEXT &&
	      entry.flags == context_t::DontAdvance))
	  buffer->unsafe_to_break_from_outbuffer (buffer->backtrack_len () - 1, buffer->idx + 1);
      }

      /* Unsafe-to-break if end-of-text would kick in here. */
      if (buffer->idx + 2 <= buffer->len)
      {
	const Entry<EntryData> &end_entry = machine.get_entry (state, StateTable<Types, EntryData>::CLASS_END_OF_TEXT);
	if (c->is_actionable (this, end_entry))
	  buffer->unsafe_to_break (buffer->idx, buffer->idx + 2);
      }

      c->transition (this, entry);

      state = machine.new_state (entry.newState);
      DEBUG_MSG (APPLY, nullptr, "s%d" , state);

      if (buffer->idx == buffer->len)
	break;

      /* max_ops also guards against fonts that loop forever via
       * DontAdvance: once the budget runs out we advance regardless. */
      if (!(entry.flags & context_t::DontAdvance) || buffer->max_ops-- <= 0)
	buffer->next_glyph ();
    }

    if (!c->in_place)
    {
      /* Flush the remaining glyphs and commit the out-buffer. */
      for (; buffer->successful && buffer->idx < buffer->len;)
	buffer->next_glyph ();
      buffer->swap_buffers ();
    }
  }

  public:
  const StateTable<Types, EntryData> &machine;
  hb_buffer_t *buffer;
  unsigned int num_glyphs;
};
805 | |
806 | |
807 | struct ankr; |
808 | |
/* Dispatch context used when applying AAT lookups to a buffer; the
 * return value of a dispatched apply() reports whether it applied. */
struct hb_aat_apply_context_t :
       hb_dispatch_context_t<hb_aat_apply_context_t, bool, HB_DEBUG_APPLY>
{
  const char *get_name () { return "APPLY" ; }
  template <typename T>
  return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value () { return false; }
  /* Stop iterating sublookups as soon as one applies. */
  bool stop_sublookup_iteration (return_t r) const { return r; }

  const hb_ot_shape_plan_t *plan;
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_sanitize_context_t sanitizer;
  const ankr *ankr_table;	/* Anchor-points table; set via set_ankr_table(). */

  /* Unused. For debug tracing only. */
  unsigned int lookup_index;
  unsigned int debug_depth;

  /* Constructor/destructor and ankr setter are defined out-of-line
   * (HB_INTERNAL). */
  HB_INTERNAL hb_aat_apply_context_t (const hb_ot_shape_plan_t *plan_,
				      hb_font_t *font_,
				      hb_buffer_t *buffer_,
				      hb_blob_t *blob = const_cast<hb_blob_t *> (&Null(hb_blob_t)));

  HB_INTERNAL ~hb_aat_apply_context_t ();

  HB_INTERNAL void set_ankr_table (const AAT::ankr *ankr_table_);

  void set_lookup_index (unsigned int i) { lookup_index = i; }
};
840 | |
841 | |
842 | } /* namespace AAT */ |
843 | |
844 | |
845 | #endif /* HB_AAT_LAYOUT_COMMON_HH */ |
846 | |