1 | /* |
2 | * Copyright © 2007,2008,2009 Red Hat, Inc. |
3 | * Copyright © 2010,2012 Google, Inc. |
4 | * |
5 | * This is part of HarfBuzz, a text shaping library. |
6 | * |
7 | * Permission is hereby granted, without written agreement and without |
8 | * license or royalty fees, to use, copy, modify, and distribute this |
9 | * software and its documentation for any purpose, provided that the |
10 | * above copyright notice and the following two paragraphs appear in |
11 | * all copies of this software. |
12 | * |
13 | * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
14 | * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
15 | * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
16 | * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
17 | * DAMAGE. |
18 | * |
19 | * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
20 | * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
21 | * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
22 | * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
23 | * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
24 | * |
25 | * Red Hat Author(s): Behdad Esfahbod |
26 | * Google Author(s): Behdad Esfahbod, Garret Rieger |
27 | */ |
28 | |
29 | #ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT2_HH |
30 | #define OT_LAYOUT_COMMON_COVERAGEFORMAT2_HH |
31 | |
32 | #include "RangeRecord.hh" |
33 | |
34 | namespace OT { |
35 | namespace Layout { |
36 | namespace Common { |
37 | |
/*
 * Coverage table, Format 2: a sorted array of glyph-ID ranges, each
 * mapped to a run of consecutive coverage indices.
 *
 * The Types template parameter selects the glyph-ID width used by
 * RangeRecord<Types>; presumably this one struct backs both the
 * 16-bit format-2 and a wider format-4 variant (hence the "2_4"
 * name) — confirm against Coverage.hh.
 */
template <typename Types>
struct CoverageFormat2_4
{
  friend struct Coverage;

  protected:
  HBUINT16	coverageFormat;	/* Format identifier--format = 2 */
  SortedArray16Of<RangeRecord<Types>>
		rangeRecord;	/* Array of glyph ranges--ordered by
				 * Start GlyphID. rangeCount entries
				 * long */
  public:
  /* Minimum size 4: 2-byte format + the array's 2-byte count. */
  DEFINE_SIZE_ARRAY (4, rangeRecord);

  private:

  /* Bounds-checks this struct and its range array against the blob. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rangeRecord.sanitize (c));
  }

  /* Returns the coverage index for glyph_id, or NOT_COVERED.
   * Binary-searches the sorted ranges; within a range, coverage
   * indices run consecutively starting at range.value.
   * NOTE(review): the covered/not-covered test relies on a bsearch
   * miss yielding a record with first > last — confirm the Null
   * RangeRecord in RangeRecord.hh is defined that way. */
  unsigned int get_coverage (hb_codepoint_t glyph_id) const
  {
    const RangeRecord<Types> &range = rangeRecord.bsearch (glyph_id);
    return likely (range.first <= range.last)
	 ? (unsigned int) range.value + (glyph_id - range.first)
	 : NOT_COVERED;
  }

  /* Total number of covered glyphs.  Accumulates in Types::large_int
   * so broken/overlapping tables cannot overflow; the result is
   * clamped to UINT_MAX on return. */
  unsigned get_population () const
  {
    typename Types::large_int ret = 0;
    for (const auto &r : rangeRecord)
      ret += r.get_population ();
    return ret > UINT_MAX ? UINT_MAX : (unsigned) ret;
  }

  /* Serializes `glyphs` (a sorted glyph-ID iterator) as range records.
   *
   * Pass 1 counts ranges: a new range starts whenever the next glyph
   * is not exactly last+1.  `last` is seeded with (hb_codepoint_t)-2
   * so the very first glyph — even glyph 0 — always opens a range
   * (unsigned wraparound makes last+1 == -1, which matches no glyph).
   * Pass 2 fills the records, carrying the running coverage index in
   * `count`.  If the input turns out not to be sorted after all, the
   * records are qsort'ed by start glyph as a best-effort repair. */
  template <typename Iterator,
	    hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    unsigned num_ranges = 0;
    hb_codepoint_t last = (hb_codepoint_t) -2;
    for (auto g: glyphs)
    {
      if (last + 1 != g)
	num_ranges++;
      last = g;
    }

    if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
    if (!num_ranges) return_trace (true);

    unsigned count = 0;
    unsigned range = (unsigned) -1;
    last = (hb_codepoint_t) -2;
    unsigned unsorted = false;
    for (auto g: glyphs)
    {
      if (last + 1 != g)
      {
	/* g went backwards (or repeated): the input was not sorted. */
	if (unlikely (last != (hb_codepoint_t) -2 && last + 1 > g))
	  unsorted = true;

	range++;
	rangeRecord.arrayZ[range].first = g;
	rangeRecord.arrayZ[range].value = count;
      }
      rangeRecord.arrayZ[range].last = g;
      last = g;
      count++;
    }

    if (unlikely (unsorted))
      rangeRecord.as_array ().qsort (RangeRecord<Types>::cmp_range);

    return_trace (true);
  }

  /* True if any glyph of `glyphs` is covered.  Picks the cheaper of
   * two strategies: when the set is small relative to the range count
   * (cost estimate |set| * log2(ranges)), probe each set glyph with
   * get_coverage(); otherwise test every range against the set. */
  bool intersects (const hb_set_t *glyphs) const
  {
    if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
    {
      for (auto g : *glyphs)
	if (get_coverage (g) != NOT_COVERED)
	  return true;
      return false;
    }

    return hb_any (+ hb_iter (rangeRecord)
		   | hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs); }));
  }
  /* True if the range responsible for coverage index `index`
   * intersects `glyphs`; false if no range matches.
   * NOTE(review): bsearch keys on the element's comparator against an
   * unsigned — verify RangeRecord's cmp treats `index` as a coverage
   * index (value .. value+length) rather than as a glyph ID. */
  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
  {
    auto *range = rangeRecord.as_array ().bsearch (index);
    if (range)
      return range->intersects (*glyphs);
    return false;
  }

  /* Streams every glyph that is both covered and present in `glyphs`
   * into `intersect_glyphs`, letting the set's next() skip gaps. */
  template <typename IterableOut,
	    hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
  void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
  {
    /* Break out of loop for overlapping, broken, tables,
     * to avoid fuzzer timeouts. */
    hb_codepoint_t last = 0;
    for (const auto& range : rangeRecord)
    {
      if (unlikely (range.first < last))
	break;
      last = range.last;
      /* Start just below the range and walk the set forward through
       * members that fall inside [range.first, last]. */
      for (hb_codepoint_t g = range.first - 1;
	   glyphs.next (&g) && g <= last;)
	intersect_glyphs << g;
    }
  }

  /* Adds every covered glyph to `glyphs`; stops early and returns
   * false if any range's collect_coverage() reports failure. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    for (const auto& range: rangeRecord)
      if (unlikely (!range.collect_coverage (glyphs)))
	return false;
    return true;
  }

  public:
  /* Forward iterator over covered glyph IDs, in coverage-index order.
   * Older compilers need this to be public. */
  struct iter_t
  {
    /* Positions on the first glyph of the first range.  An inverted
     * first range (first > last) marks the table broken and leaves
     * the iterator already exhausted (i == len).
     * NOTE(review): with an empty array, rangeRecord[0] presumably
     * resolves to the Null record rather than reading out of bounds —
     * confirm against the array's operator[]. */
    void init (const CoverageFormat2_4 &c_)
    {
      c = &c_;
      coverage = 0;
      i = 0;
      j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
      if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
      {
	/* Broken table. Skip. */
	i = c->rangeRecord.len;
	j = 0;
      }
    }
    bool __more__ () const { return i < c->rangeRecord.len; }
    /* Advances one glyph.  On crossing a range boundary, verifies the
     * next range's coverage value continues consecutively; otherwise
     * bails out to the exhausted state. */
    void __next__ ()
    {
      if (j >= c->rangeRecord[i].last)
      {
	i++;
	if (__more__ ())
	{
	  unsigned int old = coverage;
	  j = c->rangeRecord.arrayZ[i].first;
	  coverage = c->rangeRecord.arrayZ[i].value;
	  if (unlikely (coverage != old + 1))
	  {
	    /* Broken table. Skip. Important to avoid DoS.
	     * Also, our callers depend on coverage being
	     * consecutive and monotonically increasing,
	     * ie. iota(). */
	    i = c->rangeRecord.len;
	    j = 0;
	    return;
	  }
	}
	else
	  j = 0;
	return;
      }
      coverage++;
      j++;
    }
    hb_codepoint_t get_glyph () const { return j; }
    bool operator != (const iter_t& o) const
    { return i != o.i || j != o.j; }
    /* Past-the-end sentinel: i == len, j == 0. */
    iter_t __end__ () const
    {
      iter_t it;
      it.init (*c);
      it.i = c->rangeRecord.len;
      it.j = 0;
      return it;
    }

    private:
    const struct CoverageFormat2_4 *c;
    unsigned int i, coverage;	/* Current range index; current coverage index. */
    hb_codepoint_t j;		/* Current glyph ID. */
  };
  private:
};
234 | |
235 | } |
236 | } |
237 | } |
238 | |
239 | #endif // #ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT2_HH |
240 | |