1 | /* |
2 | * Copyright © 2017 Google, Inc. |
3 | * |
4 | * This is part of HarfBuzz, a text shaping library. |
5 | * |
6 | * Permission is hereby granted, without written agreement and without |
7 | * license or royalty fees, to use, copy, modify, and distribute this |
8 | * software and its documentation for any purpose, provided that the |
9 | * above copyright notice and the following two paragraphs appear in |
10 | * all copies of this software. |
11 | * |
12 | * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
13 | * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
14 | * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
15 | * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
16 | * DAMAGE. |
17 | * |
18 | * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
19 | * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
20 | * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
21 | * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
22 | * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
23 | * |
24 | * Google Author(s): Behdad Esfahbod |
25 | */ |
26 | |
27 | #ifndef HB_AAT_LAYOUT_COMMON_HH |
28 | #define HB_AAT_LAYOUT_COMMON_HH |
29 | |
30 | #include "hb-aat-layout.hh" |
31 | #include "hb-open-type.hh" |
32 | |
33 | |
34 | namespace AAT { |
35 | |
36 | using namespace OT; |
37 | |
38 | |
39 | /* |
40 | * Lookup Table |
41 | */ |
42 | |
43 | template <typename T> struct Lookup; |
44 | |
45 | template <typename T> |
46 | struct LookupFormat0 |
47 | { |
48 | friend struct Lookup<T>; |
49 | |
50 | private: |
51 | const T* get_value (hb_codepoint_t glyph_id, unsigned int num_glyphs) const |
52 | { |
53 | if (unlikely (glyph_id >= num_glyphs)) return nullptr; |
54 | return &arrayZ[glyph_id]; |
55 | } |
56 | |
57 | bool sanitize (hb_sanitize_context_t *c) const |
58 | { |
59 | TRACE_SANITIZE (this); |
60 | return_trace (arrayZ.sanitize (c, c->get_num_glyphs ())); |
61 | } |
62 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
63 | { |
64 | TRACE_SANITIZE (this); |
65 | return_trace (arrayZ.sanitize (c, c->get_num_glyphs (), base)); |
66 | } |
67 | |
68 | protected: |
69 | HBUINT16 format; /* Format identifier--format = 0 */ |
70 | UnsizedArrayOf<T> |
71 | arrayZ; /* Array of lookup values, indexed by glyph index. */ |
72 | public: |
73 | DEFINE_SIZE_UNBOUNDED (2); |
74 | }; |
75 | |
76 | |
77 | template <typename T> |
78 | struct LookupSegmentSingle |
79 | { |
80 | static constexpr unsigned TerminationWordCount = 2u; |
81 | |
82 | int cmp (hb_codepoint_t g) const |
83 | { return g < first ? -1 : g <= last ? 0 : +1 ; } |
84 | |
85 | bool sanitize (hb_sanitize_context_t *c) const |
86 | { |
87 | TRACE_SANITIZE (this); |
88 | return_trace (c->check_struct (this) && value.sanitize (c)); |
89 | } |
90 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
91 | { |
92 | TRACE_SANITIZE (this); |
93 | return_trace (c->check_struct (this) && value.sanitize (c, base)); |
94 | } |
95 | |
96 | HBGlyphID last; /* Last GlyphID in this segment */ |
97 | HBGlyphID first; /* First GlyphID in this segment */ |
98 | T value; /* The lookup value (only one) */ |
99 | public: |
100 | DEFINE_SIZE_STATIC (4 + T::static_size); |
101 | }; |
102 | |
103 | template <typename T> |
104 | struct LookupFormat2 |
105 | { |
106 | friend struct Lookup<T>; |
107 | |
108 | private: |
109 | const T* get_value (hb_codepoint_t glyph_id) const |
110 | { |
111 | const LookupSegmentSingle<T> *v = segments.bsearch (glyph_id); |
112 | return v ? &v->value : nullptr; |
113 | } |
114 | |
115 | bool sanitize (hb_sanitize_context_t *c) const |
116 | { |
117 | TRACE_SANITIZE (this); |
118 | return_trace (segments.sanitize (c)); |
119 | } |
120 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
121 | { |
122 | TRACE_SANITIZE (this); |
123 | return_trace (segments.sanitize (c, base)); |
124 | } |
125 | |
126 | protected: |
127 | HBUINT16 format; /* Format identifier--format = 2 */ |
128 | VarSizedBinSearchArrayOf<LookupSegmentSingle<T>> |
129 | segments; /* The actual segments. These must already be sorted, |
130 | * according to the first word in each one (the last |
131 | * glyph in each segment). */ |
132 | public: |
133 | DEFINE_SIZE_ARRAY (8, segments); |
134 | }; |
135 | |
136 | template <typename T> |
137 | struct LookupSegmentArray |
138 | { |
139 | static constexpr unsigned TerminationWordCount = 2u; |
140 | |
141 | const T* get_value (hb_codepoint_t glyph_id, const void *base) const |
142 | { |
143 | return first <= glyph_id && glyph_id <= last ? &(base+valuesZ)[glyph_id - first] : nullptr; |
144 | } |
145 | |
146 | int cmp (hb_codepoint_t g) const |
147 | { return g < first ? -1 : g <= last ? 0 : +1; } |
148 | |
149 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
150 | { |
151 | TRACE_SANITIZE (this); |
152 | return_trace (c->check_struct (this) && |
153 | first <= last && |
154 | valuesZ.sanitize (c, base, last - first + 1)); |
155 | } |
156 | template <typename ...Ts> |
157 | bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const |
158 | { |
159 | TRACE_SANITIZE (this); |
160 | return_trace (c->check_struct (this) && |
161 | first <= last && |
162 | valuesZ.sanitize (c, base, last - first + 1, hb_forward<Ts> (ds)...)); |
163 | } |
164 | |
165 | HBGlyphID last; /* Last GlyphID in this segment */ |
166 | HBGlyphID first; /* First GlyphID in this segment */ |
167 | NNOffsetTo<UnsizedArrayOf<T>> |
168 | valuesZ; /* A 16-bit offset from the start of |
169 | * the table to the data. */ |
170 | public: |
171 | DEFINE_SIZE_STATIC (6); |
172 | }; |
173 | |
174 | template <typename T> |
175 | struct LookupFormat4 |
176 | { |
177 | friend struct Lookup<T>; |
178 | |
179 | private: |
180 | const T* get_value (hb_codepoint_t glyph_id) const |
181 | { |
182 | const LookupSegmentArray<T> *v = segments.bsearch (glyph_id); |
183 | return v ? v->get_value (glyph_id, this) : nullptr; |
184 | } |
185 | |
186 | bool sanitize (hb_sanitize_context_t *c) const |
187 | { |
188 | TRACE_SANITIZE (this); |
189 | return_trace (segments.sanitize (c, this)); |
190 | } |
191 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
192 | { |
193 | TRACE_SANITIZE (this); |
194 | return_trace (segments.sanitize (c, this, base)); |
195 | } |
196 | |
197 | protected: |
198 | HBUINT16 format; /* Format identifier--format = 4 */ |
199 | VarSizedBinSearchArrayOf<LookupSegmentArray<T>> |
200 | segments; /* The actual segments. These must already be sorted, |
201 | * according to the first word in each one (the last |
202 | * glyph in each segment). */ |
203 | public: |
204 | DEFINE_SIZE_ARRAY (8, segments); |
205 | }; |
206 | |
207 | template <typename T> |
208 | struct LookupSingle |
209 | { |
210 | static constexpr unsigned TerminationWordCount = 1u; |
211 | |
212 | int cmp (hb_codepoint_t g) const { return glyph.cmp (g); } |
213 | |
214 | bool sanitize (hb_sanitize_context_t *c) const |
215 | { |
216 | TRACE_SANITIZE (this); |
217 | return_trace (c->check_struct (this) && value.sanitize (c)); |
218 | } |
219 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
220 | { |
221 | TRACE_SANITIZE (this); |
222 | return_trace (c->check_struct (this) && value.sanitize (c, base)); |
223 | } |
224 | |
225 | HBGlyphID glyph; /* Last GlyphID */ |
226 | T value; /* The lookup value (only one) */ |
227 | public: |
228 | DEFINE_SIZE_STATIC (2 + T::static_size); |
229 | }; |
230 | |
231 | template <typename T> |
232 | struct LookupFormat6 |
233 | { |
234 | friend struct Lookup<T>; |
235 | |
236 | private: |
237 | const T* get_value (hb_codepoint_t glyph_id) const |
238 | { |
239 | const LookupSingle<T> *v = entries.bsearch (glyph_id); |
240 | return v ? &v->value : nullptr; |
241 | } |
242 | |
243 | bool sanitize (hb_sanitize_context_t *c) const |
244 | { |
245 | TRACE_SANITIZE (this); |
246 | return_trace (entries.sanitize (c)); |
247 | } |
248 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
249 | { |
250 | TRACE_SANITIZE (this); |
251 | return_trace (entries.sanitize (c, base)); |
252 | } |
253 | |
254 | protected: |
255 | HBUINT16 format; /* Format identifier--format = 6 */ |
256 | VarSizedBinSearchArrayOf<LookupSingle<T>> |
257 | entries; /* The actual entries, sorted by glyph index. */ |
258 | public: |
259 | DEFINE_SIZE_ARRAY (8, entries); |
260 | }; |
261 | |
262 | template <typename T> |
263 | struct LookupFormat8 |
264 | { |
265 | friend struct Lookup<T>; |
266 | |
267 | private: |
268 | const T* get_value (hb_codepoint_t glyph_id) const |
269 | { |
270 | return firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount ? |
271 | &valueArrayZ[glyph_id - firstGlyph] : nullptr; |
272 | } |
273 | |
274 | bool sanitize (hb_sanitize_context_t *c) const |
275 | { |
276 | TRACE_SANITIZE (this); |
277 | return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount)); |
278 | } |
279 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
280 | { |
281 | TRACE_SANITIZE (this); |
282 | return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount, base)); |
283 | } |
284 | |
285 | protected: |
286 | HBUINT16 format; /* Format identifier--format = 8 */ |
287 | HBGlyphID firstGlyph; /* First glyph index included in the trimmed array. */ |
288 | HBUINT16 glyphCount; /* Total number of glyphs (equivalent to the last |
289 | * glyph minus the value of firstGlyph plus 1). */ |
290 | UnsizedArrayOf<T> |
291 | valueArrayZ; /* The lookup values (indexed by the glyph index |
292 | * minus the value of firstGlyph). */ |
293 | public: |
294 | DEFINE_SIZE_ARRAY (6, valueArrayZ); |
295 | }; |
296 | |
297 | template <typename T> |
298 | struct LookupFormat10 |
299 | { |
300 | friend struct Lookup<T>; |
301 | |
302 | private: |
303 | const typename T::type get_value_or_null (hb_codepoint_t glyph_id) const |
304 | { |
305 | if (!(firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount)) |
306 | return Null(T); |
307 | |
308 | const HBUINT8 *p = &valueArrayZ[(glyph_id - firstGlyph) * valueSize]; |
309 | |
310 | unsigned int v = 0; |
311 | unsigned int count = valueSize; |
312 | for (unsigned int i = 0; i < count; i++) |
313 | v = (v << 8) | *p++; |
314 | |
315 | return v; |
316 | } |
317 | |
318 | bool sanitize (hb_sanitize_context_t *c) const |
319 | { |
320 | TRACE_SANITIZE (this); |
321 | return_trace (c->check_struct (this) && |
322 | valueSize <= 4 && |
323 | valueArrayZ.sanitize (c, glyphCount * valueSize)); |
324 | } |
325 | |
326 | protected: |
HBUINT16 format; /* Format identifier--format = 10 */
328 | HBUINT16 valueSize; /* Byte size of each value. */ |
329 | HBGlyphID firstGlyph; /* First glyph index included in the trimmed array. */ |
330 | HBUINT16 glyphCount; /* Total number of glyphs (equivalent to the last |
331 | * glyph minus the value of firstGlyph plus 1). */ |
332 | UnsizedArrayOf<HBUINT8> |
333 | valueArrayZ; /* The lookup values (indexed by the glyph index |
334 | * minus the value of firstGlyph). */ |
335 | public: |
336 | DEFINE_SIZE_ARRAY (8, valueArrayZ); |
337 | }; |
338 | |
339 | template <typename T> |
340 | struct Lookup |
341 | { |
342 | const T* get_value (hb_codepoint_t glyph_id, unsigned int num_glyphs) const |
343 | { |
344 | switch (u.format) { |
345 | case 0: return u.format0.get_value (glyph_id, num_glyphs); |
346 | case 2: return u.format2.get_value (glyph_id); |
347 | case 4: return u.format4.get_value (glyph_id); |
348 | case 6: return u.format6.get_value (glyph_id); |
349 | case 8: return u.format8.get_value (glyph_id); |
350 | default:return nullptr; |
351 | } |
352 | } |
353 | |
354 | const typename T::type get_value_or_null (hb_codepoint_t glyph_id, unsigned int num_glyphs) const |
355 | { |
356 | switch (u.format) { |
357 | /* Format 10 cannot return a pointer. */ |
358 | case 10: return u.format10.get_value_or_null (glyph_id); |
359 | default: |
360 | const T *v = get_value (glyph_id, num_glyphs); |
361 | return v ? *v : Null(T); |
362 | } |
363 | } |
364 | |
365 | typename T::type get_class (hb_codepoint_t glyph_id, |
366 | unsigned int num_glyphs, |
367 | unsigned int outOfRange) const |
368 | { |
369 | const T *v = get_value (glyph_id, num_glyphs); |
370 | return v ? *v : outOfRange; |
371 | } |
372 | |
373 | bool sanitize (hb_sanitize_context_t *c) const |
374 | { |
375 | TRACE_SANITIZE (this); |
376 | if (!u.format.sanitize (c)) return_trace (false); |
377 | switch (u.format) { |
378 | case 0: return_trace (u.format0.sanitize (c)); |
379 | case 2: return_trace (u.format2.sanitize (c)); |
380 | case 4: return_trace (u.format4.sanitize (c)); |
381 | case 6: return_trace (u.format6.sanitize (c)); |
382 | case 8: return_trace (u.format8.sanitize (c)); |
383 | case 10: return_trace (u.format10.sanitize (c)); |
384 | default:return_trace (true); |
385 | } |
386 | } |
387 | bool sanitize (hb_sanitize_context_t *c, const void *base) const |
388 | { |
389 | TRACE_SANITIZE (this); |
390 | if (!u.format.sanitize (c)) return_trace (false); |
391 | switch (u.format) { |
392 | case 0: return_trace (u.format0.sanitize (c, base)); |
393 | case 2: return_trace (u.format2.sanitize (c, base)); |
394 | case 4: return_trace (u.format4.sanitize (c, base)); |
395 | case 6: return_trace (u.format6.sanitize (c, base)); |
396 | case 8: return_trace (u.format8.sanitize (c, base)); |
397 | case 10: return_trace (false); /* We don't support format10 here currently. */ |
398 | default:return_trace (true); |
399 | } |
400 | } |
401 | |
402 | protected: |
403 | union { |
404 | HBUINT16 format; /* Format identifier */ |
405 | LookupFormat0<T> format0; |
406 | LookupFormat2<T> format2; |
407 | LookupFormat4<T> format4; |
408 | LookupFormat6<T> format6; |
409 | LookupFormat8<T> format8; |
410 | LookupFormat10<T> format10; |
411 | } u; |
412 | public: |
413 | DEFINE_SIZE_UNION (2, format); |
414 | }; |
/* Lookup format 0 has unbounded size (dependent on num_glyphs), so we need to
 * define special NULL objects for Lookup<> objects; but since it's a template,
 * our macros don't work.  So we have to hand-code them here.  UGLY. */
418 | } /* Close namespace. */ |
419 | /* Ugly hand-coded null objects for template Lookup<> :(. */ |
420 | extern HB_INTERNAL const unsigned char _hb_Null_AAT_Lookup[2]; |
421 | template <typename T> |
422 | struct Null<AAT::Lookup<T>> { |
423 | static AAT::Lookup<T> const & get_null () |
424 | { return *reinterpret_cast<const AAT::Lookup<T> *> (_hb_Null_AAT_Lookup); } |
425 | }; |
426 | namespace AAT { |
427 | |
428 | enum { DELETED_GLYPH = 0xFFFF }; |
429 | |
430 | /* |
431 | * (Extended) State Table |
432 | */ |
433 | |
434 | template <typename T> |
435 | struct Entry |
436 | { |
437 | bool sanitize (hb_sanitize_context_t *c, unsigned int count) const |
438 | { |
439 | TRACE_SANITIZE (this); |
/* Note: we don't recurse-sanitize data because we don't access it.
 * That said, in our DEFINE_SIZE_STATIC we access T::static_size,
 * which ensures that data has a simple sanitize(); it remains to be
 * determined whether that requirement can be dropped as well.
 *
 * HOWEVER! Because we are a template, our DEFINE_SIZE_STATIC
 * assertion wouldn't be checked, hence the static_assert below. */
447 | static_assert (T::static_size, "" ); |
448 | |
449 | return_trace (c->check_struct (this)); |
450 | } |
451 | |
452 | public: |
HBUINT16 newState; /* Byte offset from the beginning of the state table
 * to the new state in 'mort'/'kern'; the extended
 * ('morx'/'kerx') state tables store a state number
 * instead. */
456 | HBUINT16 flags; /* Table specific. */ |
457 | T data; /* Optional offsets to per-glyph tables. */ |
458 | public: |
459 | DEFINE_SIZE_STATIC (4 + T::static_size); |
460 | }; |
461 | |
462 | template <> |
463 | struct Entry<void> |
464 | { |
bool sanitize (hb_sanitize_context_t *c, unsigned int count HB_UNUSED) const
466 | { |
467 | TRACE_SANITIZE (this); |
468 | return_trace (c->check_struct (this)); |
469 | } |
470 | |
471 | public: |
472 | HBUINT16 newState; /* Byte offset from beginning of state table to the new state. */ |
473 | HBUINT16 flags; /* Table specific. */ |
474 | public: |
475 | DEFINE_SIZE_STATIC (4); |
476 | }; |
477 | |
478 | template <typename Types, typename Extra> |
479 | struct StateTable |
480 | { |
481 | typedef typename Types::HBUINT HBUINT; |
482 | typedef typename Types::HBUSHORT HBUSHORT; |
483 | typedef typename Types::ClassTypeNarrow ClassType; |
484 | |
485 | enum State |
486 | { |
487 | STATE_START_OF_TEXT = 0, |
488 | STATE_START_OF_LINE = 1, |
489 | }; |
490 | enum Class |
491 | { |
492 | CLASS_END_OF_TEXT = 0, |
493 | CLASS_OUT_OF_BOUNDS = 1, |
494 | CLASS_DELETED_GLYPH = 2, |
495 | CLASS_END_OF_LINE = 3, |
496 | }; |
497 | |
498 | int new_state (unsigned int newState) const |
499 | { return Types::extended ? newState : ((int) newState - (int) stateArrayTable) / (int) nClasses; } |
500 | |
501 | unsigned int get_class (hb_codepoint_t glyph_id, unsigned int num_glyphs) const |
502 | { |
503 | if (unlikely (glyph_id == DELETED_GLYPH)) return CLASS_DELETED_GLYPH; |
504 | return (this+classTable).get_class (glyph_id, num_glyphs, 1); |
505 | } |
506 | |
507 | const Entry<Extra> *get_entries () const |
508 | { return (this+entryTable).arrayZ; } |
509 | |
510 | const Entry<Extra> &get_entry (int state, unsigned int klass) const |
511 | { |
512 | if (unlikely (klass >= nClasses)) |
513 | klass = StateTable<Types, Entry<Extra>>::CLASS_OUT_OF_BOUNDS; |
514 | |
515 | const HBUSHORT *states = (this+stateArrayTable).arrayZ; |
516 | const Entry<Extra> *entries = (this+entryTable).arrayZ; |
517 | |
518 | unsigned int entry = states[state * nClasses + klass]; |
519 | DEBUG_MSG (APPLY, nullptr, "e%u" , entry); |
520 | |
521 | return entries[entry]; |
522 | } |
523 | |
524 | bool sanitize (hb_sanitize_context_t *c, |
525 | unsigned int *num_entries_out = nullptr) const |
526 | { |
527 | TRACE_SANITIZE (this); |
528 | if (unlikely (!(c->check_struct (this) && |
529 | nClasses >= 4 /* Ensure pre-defined classes fit. */ && |
530 | classTable.sanitize (c, this)))) return_trace (false); |
531 | |
532 | const HBUSHORT *states = (this+stateArrayTable).arrayZ; |
533 | const Entry<Extra> *entries = (this+entryTable).arrayZ; |
534 | |
535 | unsigned int num_classes = nClasses; |
536 | if (unlikely (hb_unsigned_mul_overflows (num_classes, states[0].static_size))) |
537 | return_trace (false); |
538 | unsigned int row_stride = num_classes * states[0].static_size; |
539 | |
540 | /* Apple 'kern' table has this peculiarity: |
541 | * |
542 | * "Because the stateTableOffset in the state table header is (strictly |
543 | * speaking) redundant, some 'kern' tables use it to record an initial |
544 | * state where that should not be StartOfText. To determine if this is |
545 | * done, calculate what the stateTableOffset should be. If it's different |
546 | * from the actual stateTableOffset, use it as the initial state." |
547 | * |
548 | * We implement this by calling the initial state zero, but allow *negative* |
549 | * states if the start state indeed was not the first state. Since the code |
550 | * is shared, this will also apply to 'mort' table. The 'kerx' / 'morx' |
551 | * tables are not affected since those address states by index, not offset. |
552 | */ |
553 | |
554 | int min_state = 0; |
555 | int max_state = 0; |
556 | unsigned int num_entries = 0; |
557 | |
558 | int state_pos = 0; |
559 | int state_neg = 0; |
560 | unsigned int entry = 0; |
561 | while (min_state < state_neg || state_pos <= max_state) |
562 | { |
563 | if (min_state < state_neg) |
564 | { |
565 | /* Negative states. */ |
566 | if (unlikely (hb_unsigned_mul_overflows (min_state, num_classes))) |
567 | return_trace (false); |
568 | if (unlikely (!c->check_range (&states[min_state * num_classes], |
569 | -min_state, |
570 | row_stride))) |
571 | return_trace (false); |
572 | if ((c->max_ops -= state_neg - min_state) <= 0) |
573 | return_trace (false); |
574 | { /* Sweep new states. */ |
575 | const HBUSHORT *stop = &states[min_state * num_classes]; |
576 | if (unlikely (stop > states)) |
577 | return_trace (false); |
578 | for (const HBUSHORT *p = states; stop < p; p--) |
579 | num_entries = hb_max (num_entries, *(p - 1) + 1); |
580 | state_neg = min_state; |
581 | } |
582 | } |
583 | |
584 | if (state_pos <= max_state) |
585 | { |
586 | /* Positive states. */ |
587 | if (unlikely (!c->check_range (states, |
588 | max_state + 1, |
589 | row_stride))) |
590 | return_trace (false); |
591 | if ((c->max_ops -= max_state - state_pos + 1) <= 0) |
592 | return_trace (false); |
593 | { /* Sweep new states. */ |
594 | if (unlikely (hb_unsigned_mul_overflows ((max_state + 1), num_classes))) |
595 | return_trace (false); |
596 | const HBUSHORT *stop = &states[(max_state + 1) * num_classes]; |
597 | if (unlikely (stop < states)) |
598 | return_trace (false); |
599 | for (const HBUSHORT *p = &states[state_pos * num_classes]; p < stop; p++) |
600 | num_entries = hb_max (num_entries, *p + 1); |
601 | state_pos = max_state + 1; |
602 | } |
603 | } |
604 | |
605 | if (unlikely (!c->check_array (entries, num_entries))) |
606 | return_trace (false); |
607 | if ((c->max_ops -= num_entries - entry) <= 0) |
608 | return_trace (false); |
609 | { /* Sweep new entries. */ |
610 | const Entry<Extra> *stop = &entries[num_entries]; |
611 | for (const Entry<Extra> *p = &entries[entry]; p < stop; p++) |
612 | { |
613 | int newState = new_state (p->newState); |
614 | min_state = hb_min (min_state, newState); |
615 | max_state = hb_max (max_state, newState); |
616 | } |
617 | entry = num_entries; |
618 | } |
619 | } |
620 | |
621 | if (num_entries_out) |
622 | *num_entries_out = num_entries; |
623 | |
624 | return_trace (true); |
625 | } |
626 | |
627 | protected: |
628 | HBUINT nClasses; /* Number of classes, which is the number of indices |
629 | * in a single line in the state array. */ |
630 | NNOffsetTo<ClassType, HBUINT> |
631 | classTable; /* Offset to the class table. */ |
632 | NNOffsetTo<UnsizedArrayOf<HBUSHORT>, HBUINT> |
633 | stateArrayTable;/* Offset to the state array. */ |
634 | NNOffsetTo<UnsizedArrayOf<Entry<Extra>>, HBUINT> |
635 | entryTable; /* Offset to the entry array. */ |
636 | |
637 | public: |
638 | DEFINE_SIZE_STATIC (4 * sizeof (HBUINT)); |
639 | }; |
640 | |
641 | template <typename HBUCHAR> |
642 | struct ClassTable |
643 | { |
644 | unsigned int get_class (hb_codepoint_t glyph_id, unsigned int outOfRange) const |
645 | { |
646 | unsigned int i = glyph_id - firstGlyph; |
647 | return i >= classArray.len ? outOfRange : classArray.arrayZ[i]; |
648 | } |
649 | unsigned int get_class (hb_codepoint_t glyph_id, |
650 | unsigned int num_glyphs HB_UNUSED, |
651 | unsigned int outOfRange) const |
652 | { |
653 | return get_class (glyph_id, outOfRange); |
654 | } |
655 | bool sanitize (hb_sanitize_context_t *c) const |
656 | { |
657 | TRACE_SANITIZE (this); |
658 | return_trace (c->check_struct (this) && classArray.sanitize (c)); |
659 | } |
660 | protected: |
661 | HBGlyphID firstGlyph; /* First glyph index included in the trimmed array. */ |
662 | ArrayOf<HBUCHAR> classArray; /* The class codes (indexed by glyph index minus |
663 | * firstGlyph). */ |
664 | public: |
665 | DEFINE_SIZE_ARRAY (4, classArray); |
666 | }; |
667 | |
668 | struct ObsoleteTypes |
669 | { |
670 | static constexpr bool extended = false; |
671 | typedef HBUINT16 HBUINT; |
672 | typedef HBUINT8 HBUSHORT; |
673 | typedef ClassTable<HBUINT8> ClassTypeNarrow; |
674 | typedef ClassTable<HBUINT16> ClassTypeWide; |
675 | |
676 | template <typename T> |
677 | static unsigned int offsetToIndex (unsigned int offset, |
678 | const void *base, |
679 | const T *array) |
680 | { |
681 | return (offset - ((const char *) array - (const char *) base)) / T::static_size; |
682 | } |
683 | template <typename T> |
684 | static unsigned int byteOffsetToIndex (unsigned int offset, |
685 | const void *base, |
686 | const T *array) |
687 | { |
688 | return offsetToIndex (offset, base, array); |
689 | } |
690 | template <typename T> |
691 | static unsigned int wordOffsetToIndex (unsigned int offset, |
692 | const void *base, |
693 | const T *array) |
694 | { |
695 | return offsetToIndex (2 * offset, base, array); |
696 | } |
697 | }; |
698 | struct ExtendedTypes |
699 | { |
700 | static constexpr bool extended = true; |
701 | typedef HBUINT32 HBUINT; |
702 | typedef HBUINT16 HBUSHORT; |
703 | typedef Lookup<HBUINT16> ClassTypeNarrow; |
704 | typedef Lookup<HBUINT16> ClassTypeWide; |
705 | |
706 | template <typename T> |
707 | static unsigned int offsetToIndex (unsigned int offset, |
708 | const void *base HB_UNUSED, |
709 | const T *array HB_UNUSED) |
710 | { |
711 | return offset; |
712 | } |
713 | template <typename T> |
714 | static unsigned int byteOffsetToIndex (unsigned int offset, |
715 | const void *base HB_UNUSED, |
716 | const T *array HB_UNUSED) |
717 | { |
718 | return offset / 2; |
719 | } |
720 | template <typename T> |
721 | static unsigned int wordOffsetToIndex (unsigned int offset, |
722 | const void *base HB_UNUSED, |
723 | const T *array HB_UNUSED) |
724 | { |
725 | return offset; |
726 | } |
727 | }; |
728 | |
729 | template <typename Types, typename EntryData> |
730 | struct StateTableDriver |
731 | { |
732 | StateTableDriver (const StateTable<Types, EntryData> &machine_, |
733 | hb_buffer_t *buffer_, |
734 | hb_face_t *face_) : |
735 | machine (machine_), |
736 | buffer (buffer_), |
737 | num_glyphs (face_->get_num_glyphs ()) {} |
738 | |
739 | template <typename context_t> |
740 | void drive (context_t *c) |
741 | { |
742 | if (!c->in_place) |
743 | buffer->clear_output (); |
744 | |
745 | int state = StateTable<Types, EntryData>::STATE_START_OF_TEXT; |
746 | for (buffer->idx = 0; buffer->successful;) |
747 | { |
748 | unsigned int klass = buffer->idx < buffer->len ? |
749 | machine.get_class (buffer->info[buffer->idx].codepoint, num_glyphs) : |
750 | (unsigned) StateTable<Types, EntryData>::CLASS_END_OF_TEXT; |
751 | DEBUG_MSG (APPLY, nullptr, "c%u at %u" , klass, buffer->idx); |
752 | const Entry<EntryData> &entry = machine.get_entry (state, klass); |
753 | |
754 | /* Unsafe-to-break before this if not in state 0, as things might |
755 | * go differently if we start from state 0 here. |
756 | * |
757 | * Ugh. The indexing here is ugly... */ |
758 | if (state && buffer->backtrack_len () && buffer->idx < buffer->len) |
759 | { |
760 | /* If there's no action and we're just epsilon-transitioning to state 0, |
761 | * safe to break. */ |
762 | if (c->is_actionable (this, entry) || |
763 | !(entry.newState == StateTable<Types, EntryData>::STATE_START_OF_TEXT && |
764 | entry.flags == context_t::DontAdvance)) |
765 | buffer->unsafe_to_break_from_outbuffer (buffer->backtrack_len () - 1, buffer->idx + 1); |
766 | } |
767 | |
768 | /* Unsafe-to-break if end-of-text would kick in here. */ |
769 | if (buffer->idx + 2 <= buffer->len) |
770 | { |
771 | const Entry<EntryData> &end_entry = machine.get_entry (state, StateTable<Types, EntryData>::CLASS_END_OF_TEXT); |
772 | if (c->is_actionable (this, end_entry)) |
773 | buffer->unsafe_to_break (buffer->idx, buffer->idx + 2); |
774 | } |
775 | |
776 | c->transition (this, entry); |
777 | |
778 | state = machine.new_state (entry.newState); |
779 | DEBUG_MSG (APPLY, nullptr, "s%d" , state); |
780 | |
781 | if (buffer->idx == buffer->len) |
782 | break; |
783 | |
784 | if (!(entry.flags & context_t::DontAdvance) || buffer->max_ops-- <= 0) |
785 | buffer->next_glyph (); |
786 | } |
787 | |
788 | if (!c->in_place) |
789 | { |
790 | for (; buffer->successful && buffer->idx < buffer->len;) |
791 | buffer->next_glyph (); |
792 | buffer->swap_buffers (); |
793 | } |
794 | } |
795 | |
796 | public: |
797 | const StateTable<Types, EntryData> &machine; |
798 | hb_buffer_t *buffer; |
799 | unsigned int num_glyphs; |
800 | }; |
801 | |
802 | |
803 | struct ankr; |
804 | |
805 | struct hb_aat_apply_context_t : |
806 | hb_dispatch_context_t<hb_aat_apply_context_t, bool, HB_DEBUG_APPLY> |
807 | { |
808 | const char *get_name () { return "APPLY" ; } |
809 | template <typename T> |
810 | return_t dispatch (const T &obj) { return obj.apply (this); } |
811 | static return_t default_return_value () { return false; } |
812 | bool stop_sublookup_iteration (return_t r) const { return r; } |
813 | |
814 | const hb_ot_shape_plan_t *plan; |
815 | hb_font_t *font; |
816 | hb_face_t *face; |
817 | hb_buffer_t *buffer; |
818 | hb_sanitize_context_t sanitizer; |
819 | const ankr *ankr_table; |
820 | |
821 | /* Unused. For debug tracing only. */ |
822 | unsigned int lookup_index; |
823 | unsigned int debug_depth; |
824 | |
825 | HB_INTERNAL hb_aat_apply_context_t (const hb_ot_shape_plan_t *plan_, |
826 | hb_font_t *font_, |
827 | hb_buffer_t *buffer_, |
828 | hb_blob_t *blob = const_cast<hb_blob_t *> (&Null(hb_blob_t))); |
829 | |
830 | HB_INTERNAL ~hb_aat_apply_context_t (); |
831 | |
832 | HB_INTERNAL void set_ankr_table (const AAT::ankr *ankr_table_); |
833 | |
834 | void set_lookup_index (unsigned int i) { lookup_index = i; } |
835 | }; |
836 | |
837 | |
838 | } /* namespace AAT */ |
839 | |
840 | |
841 | #endif /* HB_AAT_LAYOUT_COMMON_HH */ |
842 | |