1#ifndef OT_GLYF_COMPOSITEGLYPH_HH
2#define OT_GLYF_COMPOSITEGLYPH_HH
3
4
5#include "../../hb-open-type.hh"
6#include "composite-iter.hh"
7
8
9namespace OT {
10namespace glyf_impl {
11
12
/* One component record of a composite glyph, as laid out in the OpenType
 * 'glyf' table.  The record is variable-length: its real size depends on
 * the flag bits (see get_size ()).  Instances are always read/written
 * in place over font data; `pad` only reserves space so that min_size
 * covers the smallest possible record. */
struct CompositeGlyphRecord
{
 protected:
  /* Component flags per the OpenType composite-glyph description; they
   * control both interpretation and the variable length of the record. */
  enum composite_glyph_flag_t
  {
    ARG_1_AND_2_ARE_WORDS = 0x0001,     /* arg1/arg2 are 16-bit; otherwise 8-bit */
    ARGS_ARE_XY_VALUES = 0x0002,        /* args are x/y offsets; else anchor point numbers */
    ROUND_XY_TO_GRID = 0x0004,
    WE_HAVE_A_SCALE = 0x0008,           /* one F2DOT14 uniform scale follows the args */
    MORE_COMPONENTS = 0x0020,           /* another component record follows this one */
    WE_HAVE_AN_X_AND_Y_SCALE = 0x0040,  /* two F2DOT14 values (x scale, y scale) follow */
    WE_HAVE_A_TWO_BY_TWO = 0x0080,      /* four F2DOT14 values (full 2x2 matrix) follow */
    WE_HAVE_INSTRUCTIONS = 0x0100,      /* instructions trail the last component */
    USE_MY_METRICS = 0x0200,
    OVERLAP_COMPOUND = 0x0400,
    SCALED_COMPONENT_OFFSET = 0x0800,
    UNSCALED_COMPONENT_OFFSET = 0x1000,
#ifndef HB_NO_BEYOND_64K
    GID_IS_24BIT = 0x2000               /* glyph index is 24-bit (beyond-64k extension) */
#endif
  };

 public:
  /* Actual byte length of this record, computed from the flag bits. */
  unsigned int get_size () const
  {
    unsigned int size = min_size;
    /* glyphIndex is 24bit instead of 16bit */
#ifndef HB_NO_BEYOND_64K
    if (flags & GID_IS_24BIT) size += HBGlyphID24::static_size - HBGlyphID16::static_size;
#endif
    /* arg1 and 2 are int16 */
    if (flags & ARG_1_AND_2_ARE_WORDS) size += 4;
    /* arg1 and 2 are int8 */
    else size += 2;

    /* One x 16 bit (scale) */
    if (flags & WE_HAVE_A_SCALE) size += 2;
    /* Two x 16 bit (xscale, yscale) */
    else if (flags & WE_HAVE_AN_X_AND_Y_SCALE) size += 4;
    /* Four x 16 bit (xscale, scale01, scale10, yscale) */
    else if (flags & WE_HAVE_A_TWO_BY_TWO) size += 8;

    return size;
  }

  /* Clear the instructions bit (used when stripping hints). */
  void drop_instructions_flag () { flags = (uint16_t) flags & ~WE_HAVE_INSTRUCTIONS; }
  /* Mark the component as possibly overlapping other components. */
  void set_overlaps_flag ()
  {
    flags = (uint16_t) flags | OVERLAP_COMPOUND;
  }

  bool has_instructions () const { return flags & WE_HAVE_INSTRUCTIONS; }

  /* True if another component record follows this one. */
  bool has_more () const { return flags & MORE_COMPONENTS; }
  bool is_use_my_metrics () const { return flags & USE_MY_METRICS; }
  /* Anchored components position via point numbers rather than x/y offsets. */
  bool is_anchored () const { return !(flags & ARGS_ARE_XY_VALUES); }
  /* Read arg1/arg2 as anchor point numbers.  Only meaningful when
   * is_anchored (); assumes the record was already bounds-checked by the
   * iterator that produced it — TODO confirm all callers guarantee this. */
  void get_anchor_points (unsigned int &point1, unsigned int &point2) const
  {
    /* Step past the glyph index to reach the argument bytes. */
    const auto *p = &StructAfter<const HBUINT8> (flags);
#ifndef HB_NO_BEYOND_64K
    if (flags & GID_IS_24BIT)
      p += HBGlyphID24::static_size;
    else
#endif
      p += HBGlyphID16::static_size;
    if (flags & ARG_1_AND_2_ARE_WORDS)
    {
      point1 = ((const HBUINT16 *) p)[0];
      point2 = ((const HBUINT16 *) p)[1];
    }
    else
    {
      point1 = p[0];
      point2 = p[1];
    }
  }

  /* Apply the 2x2 matrix to all points; skipped when it is the identity. */
  static void transform (const float (&matrix)[4],
			 hb_array_t<contour_point_t> points)
  {
    if (matrix[0] != 1.f || matrix[1] != 0.f ||
	matrix[2] != 0.f || matrix[3] != 1.f)
      for (auto &point : points)
        point.transform (matrix);
  }

  /* Offset all points by `trans`; no-op when the offset is zero.  The
   * size-optimized build uses one generic loop; the default build special-
   * cases x-only / y-only offsets to touch one coordinate per point. */
  static void translate (const contour_point_t &trans,
			 hb_array_t<contour_point_t> points)
  {
    if (HB_OPTIMIZE_SIZE_VAL)
    {
      if (trans.x != 0.f || trans.y != 0.f)
        for (auto &point : points)
          point.translate (trans);
    }
    else
    {
      if (trans.x != 0.f && trans.y != 0.f)
        for (auto &point : points)
          point.translate (trans);
      else
      {
        if (trans.x != 0.f)
          for (auto &point : points)
            point.x += trans.x;
        else if (trans.y != 0.f)
          for (auto &point : points)
            point.y += trans.y;
      }
    }
  }

  /* Apply this component's full transformation.  Order matters: with
   * scaled offsets the translation itself is subject to the matrix, so it
   * is applied first; otherwise the matrix is applied first. */
  void transform_points (hb_array_t<contour_point_t> points,
			 const float (&matrix)[4],
			 const contour_point_t &trans) const
  {
    if (scaled_offsets ())
    {
      translate (trans, points);
      transform (matrix, points);
    }
    else
    {
      transform (matrix, points);
      translate (trans, points);
    }
  }

  /* Push this component's translation as a pseudo-point onto `points`.
   * Reserves four extra slots up front for the phantom points appended
   * later.  Returns false on allocation failure. */
  bool get_points (contour_point_vector_t &points) const
  {
    float matrix[4];
    contour_point_t trans;
    get_transformation (matrix, trans);
    if (unlikely (!points.alloc (points.length + 4))) return false; // For phantom points
    points.push (trans);
    return true;
  }

  /* Serialize a copy of this record into `out`, replacing arg1/arg2 with
   * the rounded coordinates of `point`.  If the record stores int8 args
   * and the new values do not fit, the copy is widened to int16 args
   * (ARG_1_AND_2_ARE_WORDS set, record grows by 2 bytes).  Returns the
   * number of bytes written; `out` must have room for get_size () + 2
   * — presumably guaranteed by the caller's 2x allocation; verify. */
  unsigned compile_with_point (const contour_point_t &point,
                               char *out) const
  {
    /* Locate the argument bytes, right after the glyph index. */
    const HBINT8 *p = &StructAfter<const HBINT8> (flags);
#ifndef HB_NO_BEYOND_64K
    if (flags & GID_IS_24BIT)
      p += HBGlyphID24::static_size;
    else
#endif
      p += HBGlyphID16::static_size;

    unsigned len = get_size ();
    unsigned len_before_val = (const char *)p - (const char *)this;
    if (flags & ARG_1_AND_2_ARE_WORDS)
    {
      // no overflow, copy value
      hb_memcpy (out, this, len);

      HBINT16 *o = reinterpret_cast<HBINT16 *> (out + len_before_val);
      o[0] = roundf (point.x);
      o[1] = roundf (point.y);
    }
    else
    {
      int new_x = roundf (point.x);
      int new_y = roundf (point.y);
      if (new_x <= 127 && new_x >= -128 &&
          new_y <= 127 && new_y >= -128)
      {
        /* Still fits in int8: plain copy plus in-place patch. */
        hb_memcpy (out, this, len);
        HBINT8 *o = reinterpret_cast<HBINT8 *> (out + len_before_val);
        o[0] = new_x;
        o[1] = new_y;
      }
      else
      {
        // new point value has an int8 overflow
        hb_memcpy (out, this, len_before_val);

        //update flags
        CompositeGlyphRecord *o = reinterpret_cast<CompositeGlyphRecord *> (out);
        o->flags = flags | ARG_1_AND_2_ARE_WORDS;
        out += len_before_val;

        /* Write the widened int16 args, then the rest of the record
         * (skipping the two original int8 args). */
        HBINT16 new_value;
        new_value = new_x;
        hb_memcpy (out, &new_value, HBINT16::static_size);
        out += HBINT16::static_size;

        new_value = new_y;
        hb_memcpy (out, &new_value, HBINT16::static_size);
        out += HBINT16::static_size;

        hb_memcpy (out, p+2, len - len_before_val - 2);
        len += 2;
      }
    }
    return len;
  }

 protected:
  /* Whether the component offset should be scaled by the matrix.
   * Only SCALED_COMPONENT_OFFSET set (and not UNSCALED_) means scaled. */
  bool scaled_offsets () const
  { return (flags & (SCALED_COMPONENT_OFFSET | UNSCALED_COMPONENT_OFFSET)) == SCALED_COMPONENT_OFFSET; }

 public:
  /* Decode the component's 2x2 matrix into `matrix` and its translation
   * into `trans` (zeroed when anchored, since args are point numbers).
   * Returns true iff the transformation is non-trivial: a scale/matrix
   * is present, or the offset is nonzero. */
  bool get_transformation (float (&matrix)[4], contour_point_t &trans) const
  {
    /* Start from the identity. */
    matrix[0] = matrix[3] = 1.f;
    matrix[1] = matrix[2] = 0.f;

    /* Step past the glyph index to the args. */
    const auto *p = &StructAfter<const HBINT8> (flags);
#ifndef HB_NO_BEYOND_64K
    if (flags & GID_IS_24BIT)
      p += HBGlyphID24::static_size;
    else
#endif
      p += HBGlyphID16::static_size;
    int tx, ty;
    if (flags & ARG_1_AND_2_ARE_WORDS)
    {
      tx = *(const HBINT16 *) p;
      p += HBINT16::static_size;
      ty = *(const HBINT16 *) p;
      p += HBINT16::static_size;
    }
    else
    {
      tx = *p++;
      ty = *p++;
    }
    if (is_anchored ()) tx = ty = 0;

    trans.init ((float) tx, (float) ty);

    {
      /* Scale data (if any) follows the args as F2DOT14 values. */
      const F2DOT14 *points = (const F2DOT14 *) p;
      if (flags & WE_HAVE_A_SCALE)
      {
        matrix[0] = matrix[3] = points[0].to_float ();
        return true;
      }
      else if (flags & WE_HAVE_AN_X_AND_Y_SCALE)
      {
        matrix[0] = points[0].to_float ();
        matrix[3] = points[1].to_float ();
        return true;
      }
      else if (flags & WE_HAVE_A_TWO_BY_TWO)
      {
        matrix[0] = points[0].to_float ();
        matrix[1] = points[1].to_float ();
        matrix[2] = points[2].to_float ();
        matrix[3] = points[3].to_float ();
        return true;
      }
    }
    return tx || ty;
  }

  /* Glyph index of the referenced component (16- or 24-bit). */
  hb_codepoint_t get_gid () const
  {
#ifndef HB_NO_BEYOND_64K
    if (flags & GID_IS_24BIT)
      return StructAfter<const HBGlyphID24> (flags);
    else
#endif
      return StructAfter<const HBGlyphID16> (flags);
  }
  /* Overwrite the glyph index in place, honoring the current width flag. */
  void set_gid (hb_codepoint_t gid)
  {
#ifndef HB_NO_BEYOND_64K
    if (flags & GID_IS_24BIT)
      StructAfter<HBGlyphID24> (flags) = gid;
    else
#endif
      /* TODO assert? */
      StructAfter<HBGlyphID16> (flags) = gid;
  }

#ifndef HB_NO_BEYOND_64K
  /* Shrink a 24-bit glyph index to 16-bit in place when the gid fits,
   * clearing GID_IS_24BIT and sliding the remainder of the record down
   * one byte.  No-op if the flag is unset or the gid needs 24 bits. */
  void lower_gid_24_to_16 ()
  {
    hb_codepoint_t gid = get_gid ();
    if (!(flags & GID_IS_24BIT) || gid > 0xFFFFu)
      return;

    /* Lower the flag and move the rest of the struct down. */

    unsigned size = get_size ();
    char *end = (char *) this + size;
    char *p = &StructAfter<char> (flags);
    p += HBGlyphID24::static_size;

    flags = flags & ~GID_IS_24BIT;
    set_gid (gid);  /* must happen after the flag change so the 16-bit slot is written */

    memmove (p - HBGlyphID24::static_size + HBGlyphID16::static_size, p, end - p);
  }
#endif

 protected:
  HBUINT16 flags;  /* the only fixed field; everything else is flag-dependent */
  HBUINT24 pad;    /* placeholder: pads min_size to the smallest real record */
 public:
  DEFINE_SIZE_MIN (4);
};
317
/* Iterator over the chain of CompositeGlyphRecords in a composite glyph. */
using composite_iter_t = composite_iter_tmpl<CompositeGlyphRecord>;
319
/* View over a whole composite glyph: its GlyphHeader followed by the chain
 * of CompositeGlyphRecords and, optionally, trailing instructions.
 * Non-owning; `bytes` spans the full glyph data including the header. */
struct CompositeGlyph
{
  const GlyphHeader &header;  /* header at the start of `bytes` */
  hb_bytes_t bytes;           /* entire glyph data, header included */
  CompositeGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) :
    header (header_), bytes (bytes_) {}

  /* Iterate the component records that follow the header. */
  composite_iter_t iter () const
  { return composite_iter_t (bytes, &StructAfter<CompositeGlyphRecord, GlyphHeader> (header)); }

  /* Byte length of the instruction data that trails the last component.
   * Returns 0 when there are no components, no WE_HAVE_INSTRUCTIONS flag,
   * or the computed start would run past the end (malformed data). */
  unsigned int instructions_length (hb_bytes_t bytes) const
  {
    unsigned int start = bytes.length;
    unsigned int end = bytes.length;
    const CompositeGlyphRecord *last = nullptr;
    /* Walk to the last component; instructions follow it. */
    for (auto &item : iter ())
      last = &item;
    if (unlikely (!last)) return 0;

    if (last->has_instructions ())
      /* Offset of the instruction data from the start of the glyph. */
      start = (char *) last - &bytes + last->get_size ();
    if (unlikely (start > end)) return 0;
    return end - start;
  }

  /* Trimming for composites not implemented.
   * If removing hints it falls out of that. */
  const hb_bytes_t trim_padding () const { return bytes; }

  /* Clear the instructions flag on every component record (in place). */
  void drop_hints ()
  {
    for (const auto &_ : iter ())
      const_cast<CompositeGlyphRecord &> (_).drop_instructions_flag ();
  }

  /* Chop instructions off the end */
  void drop_hints_bytes (hb_bytes_t &dest_start) const
  { dest_start = bytes.sub_array (0, bytes.length - instructions_length (bytes)); }

  /* Set OVERLAP_COMPOUND on the first component (spec: first is enough),
   * after verifying the record actually lies within `bytes`. */
  void set_overlaps_flag ()
  {
    CompositeGlyphRecord& glyph_chain = const_cast<CompositeGlyphRecord &> (
        StructAfter<CompositeGlyphRecord, GlyphHeader> (header));
    if (!bytes.check_range(&glyph_chain, CompositeGlyphRecord::min_size))
      return;
    glyph_chain.set_overlaps_flag ();
  }

  /* Rebuild the component chain with each component's offset replaced by
   * its instanced position from `points_with_deltas` (anchored components
   * copied verbatim), then append any trailing instructions.  On success,
   * `dest_bytes` receives a newly-allocated buffer owned by the caller.
   * Returns false on allocation failure or if there are more components
   * than supplied points.  Non-composite input yields empty dest_bytes. */
  bool compile_bytes_with_deltas (const hb_bytes_t &source_bytes,
                                  const contour_point_vector_t &points_with_deltas,
                                  hb_bytes_t &dest_bytes /* OUT */)
  {
    /* numberOfContours == -1 marks a composite glyph; anything else
     * (or a too-short buffer) has nothing to compile. */
    if (source_bytes.length <= GlyphHeader::static_size ||
        header.numberOfContours != -1)
    {
      dest_bytes = hb_bytes_t ();
      return true;
    }

    unsigned source_len = source_bytes.length - GlyphHeader::static_size;

    /* try to allocate more memories than source glyph bytes
     * in case that there might be an overflow for int8 value
     * and we would need to use int16 instead */
    char *o = (char *) hb_calloc (source_len * 2, sizeof (char));
    if (unlikely (!o)) return false;

    const CompositeGlyphRecord *c = reinterpret_cast<const CompositeGlyphRecord *> (source_bytes.arrayZ + GlyphHeader::static_size);
    auto it = composite_iter_t (hb_bytes_t ((const char *)c, source_len), c);

    char *p = o;
    unsigned i = 0, source_comp_len = 0;
    for (const auto &component : it)
    {
      /* last 4 points in points_with_deltas are phantom points and should not be included */
      if (i >= points_with_deltas.length - 4) {
        hb_free (o);
        return false;
      }

      unsigned comp_len = component.get_size ();
      if (component.is_anchored ())
      {
        /* Anchored: args are point numbers, not offsets — copy as-is. */
        hb_memcpy (p, &component, comp_len);
        p += comp_len;
      }
      else
      {
        /* May widen int8 args to int16, so new_len can exceed comp_len. */
        unsigned new_len = component.compile_with_point (points_with_deltas[i], p);
        p += new_len;
      }
      i++;
      source_comp_len += comp_len;
    }

    //copy instructions if any
    if (source_len > source_comp_len)
    {
      unsigned instr_len = source_len - source_comp_len;
      hb_memcpy (p, (const char *)c + source_comp_len, instr_len);
      p += instr_len;
    }

    unsigned len = p - o;
    dest_bytes = hb_bytes_t (o, len);
    return true;
  }
};
428
429
430} /* namespace glyf_impl */
431} /* namespace OT */
432
433
434#endif /* OT_GLYF_COMPOSITEGLYPH_HH */
435