| 1 | /* |
| 2 | * Copyright © 1998-2004 David Turner and Werner Lemberg |
| 3 | * Copyright © 2004,2007,2009,2010 Red Hat, Inc. |
| 4 | * Copyright © 2011,2012 Google, Inc. |
| 5 | * |
| 6 | * This is part of HarfBuzz, a text shaping library. |
| 7 | * |
| 8 | * Permission is hereby granted, without written agreement and without |
| 9 | * license or royalty fees, to use, copy, modify, and distribute this |
| 10 | * software and its documentation for any purpose, provided that the |
| 11 | * above copyright notice and the following two paragraphs appear in |
| 12 | * all copies of this software. |
| 13 | * |
| 14 | * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
| 15 | * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
| 16 | * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
| 17 | * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
| 18 | * DAMAGE. |
| 19 | * |
| 20 | * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
| 21 | * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
| 22 | * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
| 23 | * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
| 24 | * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
| 25 | * |
| 26 | * Red Hat Author(s): Owen Taylor, Behdad Esfahbod |
| 27 | * Google Author(s): Behdad Esfahbod |
| 28 | */ |
| 29 | |
| 30 | #ifndef HB_BUFFER_HH |
| 31 | #define HB_BUFFER_HH |
| 32 | |
| 33 | #include "hb.hh" |
| 34 | #include "hb-unicode.hh" |
| 35 | #include "hb-set-digest.hh" |
| 36 | |
| 37 | |
| 38 | static_assert ((sizeof (hb_glyph_info_t) == 20), "" ); |
| 39 | static_assert ((sizeof (hb_glyph_info_t) == sizeof (hb_glyph_position_t)), "" ); |
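/* The size equality matters because, while an out-buffer is in use, the pos
 * array can be reused as storage for out_info (see make_room_for () in
 * hb-buffer.cc); that aliasing is only valid if the two structs stay the
 * same size. */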
| 40 | |
| 41 | HB_MARK_AS_FLAG_T (hb_glyph_flags_t); |
| 42 | HB_MARK_AS_FLAG_T (hb_buffer_flags_t); |
| 43 | HB_MARK_AS_FLAG_T (hb_buffer_serialize_flags_t); |
| 44 | HB_MARK_AS_FLAG_T (hb_buffer_diff_flags_t); |
| 45 | |
| 46 | enum hb_buffer_scratch_flags_t { |
| 47 | HB_BUFFER_SCRATCH_FLAG_DEFAULT = 0x00000000u, |
| 48 | HB_BUFFER_SCRATCH_FLAG_HAS_NON_ASCII = 0x00000001u, |
| 49 | HB_BUFFER_SCRATCH_FLAG_HAS_DEFAULT_IGNORABLES = 0x00000002u, |
| 50 | HB_BUFFER_SCRATCH_FLAG_HAS_SPACE_FALLBACK = 0x00000004u, |
| 51 | HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT = 0x00000008u, |
| 52 | HB_BUFFER_SCRATCH_FLAG_HAS_CGJ = 0x00000010u, |
| 53 | HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS = 0x00000020u, |
| 54 | HB_BUFFER_SCRATCH_FLAG_HAS_BROKEN_SYLLABLE = 0x00000040u, |
| 55 | |
| 56 | /* Reserved for shapers' internal use. */ |
| 57 | HB_BUFFER_SCRATCH_FLAG_SHAPER0 = 0x01000000u, |
| 58 | HB_BUFFER_SCRATCH_FLAG_SHAPER1 = 0x02000000u, |
| 59 | HB_BUFFER_SCRATCH_FLAG_SHAPER2 = 0x04000000u, |
| 60 | HB_BUFFER_SCRATCH_FLAG_SHAPER3 = 0x08000000u, |
| 61 | }; |
| 62 | HB_MARK_AS_FLAG_T (hb_buffer_scratch_flags_t); |
| 63 | |
| 64 | |
| 65 | /* |
| 66 | * hb_buffer_t |
| 67 | */ |
| 68 | |
| 69 | struct hb_buffer_t |
| 70 | { |
  hb_object_header_t header;
| 72 | |
| 73 | /* |
| 74 | * Information about how the text in the buffer should be treated. |
| 75 | */ |
| 76 | |
| 77 | hb_unicode_funcs_t *unicode; /* Unicode functions */ |
| 78 | hb_buffer_flags_t flags; /* BOT / EOT / etc. */ |
| 79 | hb_buffer_cluster_level_t cluster_level; |
| 80 | hb_codepoint_t replacement; /* U+FFFD or something else. */ |
| 81 | hb_codepoint_t invisible; /* 0 or something else. */ |
| 82 | hb_codepoint_t not_found; /* 0 or something else. */ |
| 83 | |
| 84 | /* |
| 85 | * Buffer contents |
| 86 | */ |
| 87 | |
| 88 | hb_buffer_content_type_t content_type; |
| 89 | hb_segment_properties_t props; /* Script, language, direction */ |
| 90 | |
| 91 | bool successful; /* Allocations successful */ |
| 92 | bool shaping_failed; /* Shaping failure */ |
| 93 | bool have_output; /* Whether we have an output buffer going on */ |
| 94 | bool have_positions; /* Whether we have positions */ |
| 95 | |
| 96 | unsigned int idx; /* Cursor into ->info and ->pos arrays */ |
| 97 | unsigned int len; /* Length of ->info and ->pos arrays */ |
| 98 | unsigned int out_len; /* Length of ->out_info array if have_output */ |
| 99 | |
| 100 | unsigned int allocated; /* Length of allocated arrays */ |
| 101 | hb_glyph_info_t *info; |
| 102 | hb_glyph_info_t *out_info; |
| 103 | hb_glyph_position_t *pos; |
| 104 | |
| 105 | /* Text before / after the main buffer contents. |
| 106 | * Always in Unicode, and ordered outward. |
| 107 | * Index 0 is for "pre-context", 1 for "post-context". */ |
| 108 | static constexpr unsigned CONTEXT_LENGTH = 5u; |
| 109 | hb_codepoint_t context[2][CONTEXT_LENGTH]; |
| 110 | unsigned int context_len[2]; |
| 111 | |
| 112 | |
| 113 | /* |
| 114 | * Managed by enter / leave |
| 115 | */ |
| 116 | |
  /* The bits here reflect current allocations of the bytes in
   * glyph_info_t's var1 and var2. */
  uint8_t allocated_var_bits;
| 118 | uint8_t serial; |
| 119 | hb_buffer_scratch_flags_t scratch_flags; /* Have space-fallback, etc. */ |
| 120 | unsigned int max_len; /* Maximum allowed len. */ |
| 121 | int max_ops; /* Maximum allowed operations. */ |
| 123 | |
| 124 | |
| 125 | /* |
| 126 | * Messaging callback |
| 127 | */ |
| 128 | |
| 129 | #ifndef HB_NO_BUFFER_MESSAGE |
| 130 | hb_buffer_message_func_t message_func; |
| 131 | void *message_data; |
| 132 | hb_destroy_func_t message_destroy; |
| 133 | unsigned message_depth; /* How deeply are we inside a message callback? */ |
| 134 | #else |
| 135 | static constexpr unsigned message_depth = 0u; |
| 136 | #endif |
| 137 | |
| 138 | |
| 139 | |
| 140 | /* Methods */ |
| 141 | |
| 142 | HB_NODISCARD bool in_error () const { return !successful; } |
| 143 | |
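  /* Mask math below: (1u<<end) - (1u<<start) selects the contiguous bit range
   * for bytes [start, end) of var1/var2; e.g. start=2, count=2 gives end=4 and
   * mask 0b00001100. */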
| 144 | void allocate_var (unsigned int start, unsigned int count) |
| 145 | { |
| 146 | unsigned int end = start + count; |
| 147 | assert (end <= 8); |
| 148 | unsigned int bits = (1u<<end) - (1u<<start); |
| 149 | assert (0 == (allocated_var_bits & bits)); |
| 150 | allocated_var_bits |= bits; |
| 151 | } |
| 152 | bool try_allocate_var (unsigned int start, unsigned int count) |
| 153 | { |
| 154 | unsigned int end = start + count; |
| 155 | assert (end <= 8); |
| 156 | unsigned int bits = (1u<<end) - (1u<<start); |
| 157 | if (allocated_var_bits & bits) |
| 158 | return false; |
| 159 | allocated_var_bits |= bits; |
| 160 | return true; |
| 161 | } |
| 162 | void deallocate_var (unsigned int start, unsigned int count) |
| 163 | { |
| 164 | unsigned int end = start + count; |
| 165 | assert (end <= 8); |
| 166 | unsigned int bits = (1u<<end) - (1u<<start); |
| 167 | assert (bits == (allocated_var_bits & bits)); |
| 168 | allocated_var_bits &= ~bits; |
| 169 | } |
| 170 | void assert_var (unsigned int start, unsigned int count) |
| 171 | { |
| 172 | unsigned int end = start + count; |
| 173 | assert (end <= 8); |
| 174 | HB_UNUSED unsigned int bits = (1u<<end) - (1u<<start); |
| 175 | assert (bits == (allocated_var_bits & bits)); |
| 176 | } |
| 177 | void deallocate_var_all () |
| 178 | { |
| 179 | allocated_var_bits = 0; |
| 180 | } |
| 181 | |
| 182 | hb_glyph_info_t &cur (unsigned int i = 0) { return info[idx + i]; } |
| 183 | hb_glyph_info_t cur (unsigned int i = 0) const { return info[idx + i]; } |
| 184 | |
| 185 | hb_glyph_position_t &cur_pos (unsigned int i = 0) { return pos[idx + i]; } |
| 186 | hb_glyph_position_t cur_pos (unsigned int i = 0) const { return pos[idx + i]; } |
| 187 | |
| 188 | hb_glyph_info_t &prev () { return out_info[out_len ? out_len - 1 : 0]; } |
| 189 | hb_glyph_info_t prev () const { return out_info[out_len ? out_len - 1 : 0]; } |
| 190 | |
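  /* Accumulates the codepoints of all glyphs currently in the buffer into a
   * set digest (a Bloom-filter-like structure); add_array () strides over
   * whole hb_glyph_info_t records.  Callers can use the digest to cheaply
   * skip lookups that cannot match anything in the buffer. */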
| 191 | hb_set_digest_t digest () const |
| 192 | { |
| 193 | hb_set_digest_t d; |
| 194 | d.init (); |
| 195 | d.add_array (&info[0].codepoint, len, sizeof (info[0])); |
| 196 | return d; |
| 197 | } |
| 198 | |
| 199 | HB_INTERNAL void similar (const hb_buffer_t &src); |
| 200 | HB_INTERNAL void reset (); |
| 201 | HB_INTERNAL void clear (); |
| 202 | |
| 203 | /* Called around shape() */ |
| 204 | HB_INTERNAL void enter (); |
| 205 | HB_INTERNAL void leave (); |
| 206 | |
| 207 | #ifndef HB_NO_BUFFER_VERIFY |
| 208 | HB_INTERNAL |
| 209 | #endif |
| 210 | bool verify (hb_buffer_t *text_buffer, |
| 211 | hb_font_t *font, |
| 212 | const hb_feature_t *features, |
| 213 | unsigned int num_features, |
| 214 | const char * const *shapers) |
| 215 | #ifndef HB_NO_BUFFER_VERIFY |
| 216 | ; |
| 217 | #else |
| 218 | { return true; } |
| 219 | #endif |
| 220 | |
| 221 | unsigned int backtrack_len () const { return have_output ? out_len : idx; } |
| 222 | unsigned int lookahead_len () const { return len - idx; } |
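  /* The serial counter wraps around but never returns 0. */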
| 223 | uint8_t next_serial () { return ++serial ? serial : ++serial; } |
| 224 | |
| 225 | HB_INTERNAL void add (hb_codepoint_t codepoint, |
| 226 | unsigned int cluster); |
| 227 | HB_INTERNAL void add_info (const hb_glyph_info_t &glyph_info); |
| 228 | |
| 229 | void reverse_range (unsigned start, unsigned end) |
| 230 | { |
| 231 | hb_array_t<hb_glyph_info_t> (info, len).reverse (start, end); |
| 232 | if (have_positions) |
| 233 | hb_array_t<hb_glyph_position_t> (pos, len).reverse (start, end); |
| 234 | } |
| 235 | void reverse () { reverse_range (0, len); } |
| 236 | |
| 237 | template <typename FuncType> |
| 238 | void reverse_groups (const FuncType& group, |
| 239 | bool merge_clusters = false) |
| 240 | { |
| 241 | if (unlikely (!len)) |
| 242 | return; |
| 243 | |
| 244 | unsigned start = 0; |
| 245 | unsigned i; |
| 246 | for (i = 1; i < len; i++) |
| 247 | { |
| 248 | if (!group (info[i - 1], info[i])) |
| 249 | { |
| 250 | if (merge_clusters) |
| 251 | this->merge_clusters (start, i); |
| 252 | reverse_range (start, i); |
| 253 | start = i; |
| 254 | } |
| 255 | } |
| 256 | if (merge_clusters) |
| 257 | this->merge_clusters (start, i); |
| 258 | reverse_range (start, i); |
| 259 | |
| 260 | reverse (); |
| 261 | } |
| 262 | |
| 263 | template <typename FuncType> |
| 264 | unsigned group_end (unsigned start, const FuncType& group) const |
| 265 | { |
| 266 | while (++start < len && group (info[start - 1], info[start])) |
| 267 | ; |
| 268 | |
| 269 | return start; |
| 270 | } |
| 271 | |
| 272 | static bool _cluster_group_func (const hb_glyph_info_t& a, |
| 273 | const hb_glyph_info_t& b) |
| 274 | { return a.cluster == b.cluster; } |
| 275 | |
| 276 | void reverse_clusters () { reverse_groups (_cluster_group_func); } |
| 277 | |
| 278 | HB_INTERNAL void guess_segment_properties (); |
| 279 | |
| 280 | HB_INTERNAL bool sync (); |
| 281 | HB_INTERNAL int sync_so_far (); |
| 282 | HB_INTERNAL void clear_output (); |
| 283 | HB_INTERNAL void clear_positions (); |
| 284 | |
| 285 | template <typename T> |
| 286 | HB_NODISCARD bool replace_glyphs (unsigned int num_in, |
| 287 | unsigned int num_out, |
| 288 | const T *glyph_data) |
| 289 | { |
| 290 | if (unlikely (!make_room_for (num_in, num_out))) return false; |
| 291 | |
| 292 | assert (idx + num_in <= len); |
| 293 | |
| 294 | merge_clusters (idx, idx + num_in); |
| 295 | |
| 296 | hb_glyph_info_t &orig_info = idx < len ? cur() : prev(); |
| 297 | |
| 298 | hb_glyph_info_t *pinfo = &out_info[out_len]; |
| 299 | for (unsigned int i = 0; i < num_out; i++) |
| 300 | { |
| 301 | *pinfo = orig_info; |
| 302 | pinfo->codepoint = glyph_data[i]; |
| 303 | pinfo++; |
| 304 | } |
| 305 | |
| 306 | idx += num_in; |
| 307 | out_len += num_out; |
| 308 | return true; |
| 309 | } |
| 310 | |
| 311 | HB_NODISCARD bool replace_glyph (hb_codepoint_t glyph_index) |
| 312 | { return replace_glyphs (1, 1, &glyph_index); } |
| 313 | |
  /* Copies the glyph at idx to the output with its codepoint replaced by
   * glyph_index, without advancing idx. */
| 315 | HB_NODISCARD bool output_glyph (hb_codepoint_t glyph_index) |
| 316 | { return replace_glyphs (0, 1, &glyph_index); } |
| 317 | |
| 318 | HB_NODISCARD bool output_info (const hb_glyph_info_t &glyph_info) |
| 319 | { |
| 320 | if (unlikely (!make_room_for (0, 1))) return false; |
| 321 | |
| 322 | out_info[out_len] = glyph_info; |
| 323 | |
| 324 | out_len++; |
| 325 | return true; |
| 326 | } |
| 327 | /* Copies glyph at idx to output but doesn't advance idx */ |
| 328 | HB_NODISCARD bool copy_glyph () |
| 329 | { |
    /* Extra copy because the reference returned by cur() can be invalidated
     * if the buffer reallocates inside the output_info() call. */
| 332 | return output_info (hb_glyph_info_t (cur())); |
| 333 | } |
| 334 | |
  /* Copies glyph at idx to output and advances idx.
   * If there's no output, just advances idx. */
| 337 | HB_NODISCARD bool next_glyph () |
| 338 | { |
| 339 | if (have_output) |
| 340 | { |
| 341 | if (out_info != info || out_len != idx) |
| 342 | { |
| 343 | if (unlikely (!make_room_for (1, 1))) return false; |
| 344 | out_info[out_len] = info[idx]; |
| 345 | } |
| 346 | out_len++; |
| 347 | } |
| 348 | |
| 349 | idx++; |
| 350 | return true; |
| 351 | } |
  /* Copies n glyphs at idx to output and advances idx.
   * If there's no output, just advances idx. */
| 354 | HB_NODISCARD bool next_glyphs (unsigned int n) |
| 355 | { |
| 356 | if (have_output) |
| 357 | { |
| 358 | if (out_info != info || out_len != idx) |
| 359 | { |
| 360 | if (unlikely (!make_room_for (n, n))) return false; |
| 361 | memmove (out_info + out_len, info + idx, n * sizeof (out_info[0])); |
| 362 | } |
| 363 | out_len += n; |
| 364 | } |
| 365 | |
| 366 | idx += n; |
| 367 | return true; |
| 368 | } |
| 369 | /* Advance idx without copying to output. */ |
| 370 | void skip_glyph () { idx++; } |
| 371 | void reset_masks (hb_mask_t mask) |
| 372 | { |
| 373 | for (unsigned int j = 0; j < len; j++) |
| 374 | info[j].mask = mask; |
| 375 | } |
| 376 | void add_masks (hb_mask_t mask) |
| 377 | { |
| 378 | for (unsigned int j = 0; j < len; j++) |
| 379 | info[j].mask |= mask; |
| 380 | } |
| 381 | HB_INTERNAL void set_masks (hb_mask_t value, hb_mask_t mask, |
| 382 | unsigned int cluster_start, unsigned int cluster_end); |
| 383 | |
| 384 | void merge_clusters (unsigned int start, unsigned int end) |
| 385 | { |
| 386 | if (end - start < 2) |
| 387 | return; |
| 388 | merge_clusters_impl (start, end); |
| 389 | } |
| 390 | HB_INTERNAL void merge_clusters_impl (unsigned int start, unsigned int end); |
| 391 | HB_INTERNAL void merge_out_clusters (unsigned int start, unsigned int end); |
  /* Merges clusters as needed for deleting the current glyph, then skips it. */
| 393 | HB_INTERNAL void delete_glyph (); |
| 394 | HB_INTERNAL void delete_glyphs_inplace (bool (*filter) (const hb_glyph_info_t *info)); |
| 395 | |
| 396 | |
| 397 | |
  /* Adds the glyph flags in mask to infos whose clusters are between start and end.
   * The start index refers to the out-buffer if from_out_buffer is true.
   * If interior is true, glyphs belonging to the cluster with the minimum value are skipped. */
| 401 | void _set_glyph_flags (hb_mask_t mask, |
| 402 | unsigned start = 0, |
| 403 | unsigned end = (unsigned) -1, |
| 404 | bool interior = false, |
| 405 | bool from_out_buffer = false) |
| 406 | { |
| 407 | end = hb_min (end, len); |
| 408 | |
| 409 | if (interior && !from_out_buffer && end - start < 2) |
| 410 | return; |
| 411 | |
| 412 | scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS; |
| 413 | |
| 414 | if (!from_out_buffer || !have_output) |
| 415 | { |
| 416 | if (!interior) |
| 417 | { |
| 418 | for (unsigned i = start; i < end; i++) |
| 419 | info[i].mask |= mask; |
| 420 | } |
| 421 | else |
| 422 | { |
| 423 | unsigned cluster = _infos_find_min_cluster (info, start, end); |
| 424 | _infos_set_glyph_flags (info, start, end, cluster, mask); |
| 425 | } |
| 426 | } |
| 427 | else |
| 428 | { |
| 429 | assert (start <= out_len); |
| 430 | assert (idx <= end); |
| 431 | |
| 432 | if (!interior) |
| 433 | { |
| 434 | for (unsigned i = start; i < out_len; i++) |
| 435 | out_info[i].mask |= mask; |
| 436 | for (unsigned i = idx; i < end; i++) |
| 437 | info[i].mask |= mask; |
| 438 | } |
| 439 | else |
| 440 | { |
| 441 | unsigned cluster = _infos_find_min_cluster (info, idx, end); |
| 442 | cluster = _infos_find_min_cluster (out_info, start, out_len, cluster); |
| 443 | |
| 444 | _infos_set_glyph_flags (out_info, start, out_len, cluster, mask); |
| 445 | _infos_set_glyph_flags (info, idx, end, cluster, mask); |
| 446 | } |
| 447 | } |
| 448 | } |
| 449 | |
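  /* Convenience wrappers over _set_glyph_flags () for the public
   * HB_GLYPH_FLAG_* bits.  The UNSAFE_TO_CONCAT variants are no-ops unless
   * HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT was requested, and
   * safe_to_insert_tatweel () falls back to unsafe_to_break () unless
   * HB_BUFFER_FLAG_PRODUCE_SAFE_TO_INSERT_TATWEEL was requested. */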
| 450 | void unsafe_to_break (unsigned int start = 0, unsigned int end = -1) |
| 451 | { |
| 452 | _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK | HB_GLYPH_FLAG_UNSAFE_TO_CONCAT, |
| 453 | start, end, |
| 454 | true); |
| 455 | } |
| 456 | void safe_to_insert_tatweel (unsigned int start = 0, unsigned int end = -1) |
| 457 | { |
| 458 | if ((flags & HB_BUFFER_FLAG_PRODUCE_SAFE_TO_INSERT_TATWEEL) == 0) |
| 459 | { |
| 460 | unsafe_to_break (start, end); |
| 461 | return; |
| 462 | } |
| 463 | _set_glyph_flags (HB_GLYPH_FLAG_SAFE_TO_INSERT_TATWEEL, |
| 464 | start, end, |
| 465 | true); |
| 466 | } |
| 467 | #ifndef HB_OPTIMIZE_SIZE |
| 468 | HB_ALWAYS_INLINE |
| 469 | #endif |
| 470 | void unsafe_to_concat (unsigned int start = 0, unsigned int end = -1) |
| 471 | { |
| 472 | if (likely ((flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT) == 0)) |
| 473 | return; |
| 474 | _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_CONCAT, |
| 475 | start, end, |
| 476 | false); |
| 477 | } |
| 478 | void unsafe_to_break_from_outbuffer (unsigned int start = 0, unsigned int end = -1) |
| 479 | { |
| 480 | _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_BREAK | HB_GLYPH_FLAG_UNSAFE_TO_CONCAT, |
| 481 | start, end, |
| 482 | true, true); |
| 483 | } |
| 484 | #ifndef HB_OPTIMIZE_SIZE |
| 485 | HB_ALWAYS_INLINE |
| 486 | #endif |
| 487 | void unsafe_to_concat_from_outbuffer (unsigned int start = 0, unsigned int end = -1) |
| 488 | { |
| 489 | if (likely ((flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT) == 0)) |
| 490 | return; |
| 491 | _set_glyph_flags (HB_GLYPH_FLAG_UNSAFE_TO_CONCAT, |
| 492 | start, end, |
| 493 | false, true); |
| 494 | } |
| 495 | |
| 496 | |
| 497 | /* Internal methods */ |
| 498 | HB_NODISCARD HB_INTERNAL bool move_to (unsigned int i); /* i is output-buffer index. */ |
| 499 | |
| 500 | HB_NODISCARD HB_INTERNAL bool enlarge (unsigned int size); |
| 501 | |
| 502 | HB_NODISCARD bool resize (unsigned length) |
| 503 | { |
| 504 | assert (!have_output); |
| 505 | if (unlikely (!ensure (length))) return false; |
| 506 | len = length; |
| 507 | return true; |
| 508 | } |
| 509 | HB_NODISCARD bool ensure (unsigned int size) |
| 510 | { return likely (!size || size < allocated) ? true : enlarge (size); } |
| 511 | |
| 512 | HB_NODISCARD bool ensure_inplace (unsigned int size) |
| 513 | { return likely (!size || size < allocated); } |
| 514 | |
| 515 | void assert_glyphs () |
| 516 | { |
| 517 | assert ((content_type == HB_BUFFER_CONTENT_TYPE_GLYPHS) || |
| 518 | (!len && (content_type == HB_BUFFER_CONTENT_TYPE_INVALID))); |
| 519 | } |
| 520 | void assert_unicode () |
| 521 | { |
| 522 | assert ((content_type == HB_BUFFER_CONTENT_TYPE_UNICODE) || |
| 523 | (!len && (content_type == HB_BUFFER_CONTENT_TYPE_INVALID))); |
| 524 | } |
| 525 | HB_NODISCARD bool ensure_glyphs () |
| 526 | { |
| 527 | if (unlikely (content_type != HB_BUFFER_CONTENT_TYPE_GLYPHS)) |
| 528 | { |
| 529 | if (content_type != HB_BUFFER_CONTENT_TYPE_INVALID) |
| 530 | return false; |
| 531 | assert (len == 0); |
| 532 | content_type = HB_BUFFER_CONTENT_TYPE_GLYPHS; |
| 533 | } |
| 534 | return true; |
| 535 | } |
| 536 | HB_NODISCARD bool ensure_unicode () |
| 537 | { |
| 538 | if (unlikely (content_type != HB_BUFFER_CONTENT_TYPE_UNICODE)) |
| 539 | { |
| 540 | if (content_type != HB_BUFFER_CONTENT_TYPE_INVALID) |
| 541 | return false; |
| 542 | assert (len == 0); |
| 543 | content_type = HB_BUFFER_CONTENT_TYPE_UNICODE; |
| 544 | } |
| 545 | return true; |
| 546 | } |
| 547 | |
| 548 | HB_NODISCARD HB_INTERNAL bool make_room_for (unsigned int num_in, unsigned int num_out); |
| 549 | HB_NODISCARD HB_INTERNAL bool shift_forward (unsigned int count); |
| 550 | |
| 551 | typedef long scratch_buffer_t; |
| 552 | HB_INTERNAL scratch_buffer_t *get_scratch_buffer (unsigned int *size); |
| 553 | |
| 554 | void clear_context (unsigned int side) { context_len[side] = 0; } |
| 555 | |
| 556 | HB_INTERNAL void sort (unsigned int start, unsigned int end, int(*compar)(const hb_glyph_info_t *, const hb_glyph_info_t *)); |
| 557 | |
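  /* Debug-message hooks: message () forwards printf-style tracing to the
   * callback installed via hb_buffer_set_message_func (), and compiles away
   * when HB_NO_BUFFER_MESSAGE is defined. */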
| 558 | bool messaging () |
| 559 | { |
| 560 | #ifdef HB_NO_BUFFER_MESSAGE |
| 561 | return false; |
| 562 | #else |
| 563 | return unlikely (message_func); |
| 564 | #endif |
| 565 | } |
| 566 | bool message (hb_font_t *font, const char *fmt, ...) HB_PRINTF_FUNC(3, 4) |
| 567 | { |
| 568 | #ifdef HB_NO_BUFFER_MESSAGE |
| 569 | return true; |
| 570 | #else |
| 571 | if (likely (!messaging ())) |
| 572 | return true; |
| 573 | |
| 574 | va_list ap; |
| 575 | va_start (ap, fmt); |
| 576 | bool ret = message_impl (font, fmt, ap); |
| 577 | va_end (ap); |
| 578 | |
| 579 | return ret; |
| 580 | #endif |
| 581 | } |
| 582 | HB_INTERNAL bool message_impl (hb_font_t *font, const char *fmt, va_list ap) HB_PRINTF_FUNC(3, 0); |
| 583 | |
| 584 | static void |
| 585 | set_cluster (hb_glyph_info_t &inf, unsigned int cluster, unsigned int mask = 0) |
| 586 | { |
| 587 | if (inf.cluster != cluster) |
| 588 | inf.mask = (inf.mask & ~HB_GLYPH_FLAG_DEFINED) | (mask & HB_GLYPH_FLAG_DEFINED); |
| 589 | inf.cluster = cluster; |
| 590 | } |
| 591 | void |
| 592 | _infos_set_glyph_flags (hb_glyph_info_t *infos, |
| 593 | unsigned int start, unsigned int end, |
| 594 | unsigned int cluster, |
| 595 | hb_mask_t mask) |
| 596 | { |
| 597 | if (unlikely (start == end)) |
| 598 | return; |
| 599 | |
| 600 | unsigned cluster_first = infos[start].cluster; |
| 601 | unsigned cluster_last = infos[end - 1].cluster; |
| 602 | |
| 603 | if (cluster_level == HB_BUFFER_CLUSTER_LEVEL_CHARACTERS || |
| 604 | (cluster != cluster_first && cluster != cluster_last)) |
| 605 | { |
| 606 | for (unsigned int i = start; i < end; i++) |
| 607 | if (cluster != infos[i].cluster) |
| 608 | { |
| 609 | scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS; |
| 610 | infos[i].mask |= mask; |
| 611 | } |
| 612 | return; |
| 613 | } |
| 614 | |
| 615 | /* Monotone clusters */ |
| 616 | |
| 617 | if (cluster == cluster_first) |
| 618 | { |
| 619 | for (unsigned int i = end; start < i && infos[i - 1].cluster != cluster_first; i--) |
| 620 | { |
| 621 | scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS; |
| 622 | infos[i - 1].mask |= mask; |
| 623 | } |
| 624 | } |
| 625 | else /* cluster == cluster_last */ |
| 626 | { |
| 627 | for (unsigned int i = start; i < end && infos[i].cluster != cluster_last; i++) |
| 628 | { |
| 629 | scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GLYPH_FLAGS; |
| 630 | infos[i].mask |= mask; |
| 631 | } |
| 632 | } |
| 633 | } |
| 634 | unsigned |
| 635 | _infos_find_min_cluster (const hb_glyph_info_t *infos, |
| 636 | unsigned start, unsigned end, |
| 637 | unsigned cluster = UINT_MAX) |
| 638 | { |
| 639 | if (unlikely (start == end)) |
| 640 | return cluster; |
| 641 | |
| 642 | if (cluster_level == HB_BUFFER_CLUSTER_LEVEL_CHARACTERS) |
| 643 | { |
| 644 | for (unsigned int i = start; i < end; i++) |
| 645 | cluster = hb_min (cluster, infos[i].cluster); |
| 646 | return cluster; |
| 647 | } |
| 648 | |
| 649 | return hb_min (cluster, hb_min (infos[start].cluster, infos[end - 1].cluster)); |
| 650 | } |
| 651 | |
| 652 | void clear_glyph_flags (hb_mask_t mask = 0) |
| 653 | { |
| 654 | for (unsigned int i = 0; i < len; i++) |
| 655 | info[i].mask = (info[i].mask & ~HB_GLYPH_FLAG_DEFINED) | (mask & HB_GLYPH_FLAG_DEFINED); |
| 656 | } |
| 657 | }; |
| 658 | DECLARE_NULL_INSTANCE (hb_buffer_t); |
| 659 | |
| 660 | |
| 661 | #define foreach_group(buffer, start, end, group_func) \ |
| 662 | for (unsigned int \ |
| 663 | _count = buffer->len, \ |
| 664 | start = 0, end = _count ? buffer->group_end (0, group_func) : 0; \ |
| 665 | start < _count; \ |
| 666 | start = end, end = buffer->group_end (start, group_func)) |
| 667 | |
| 668 | #define foreach_cluster(buffer, start, end) \ |
| 669 | foreach_group (buffer, start, end, hb_buffer_t::_cluster_group_func) |
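/* Illustrative sketch (not part of the API): iterating cluster runs with the
 * macro above; process_run is a hypothetical callback, and start/end are
 * declared by the macro itself:
 *
 *   foreach_cluster (buffer, start, end)
 *     process_run (buffer, start, end);
 */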
| 670 | |
| 671 | |
| 672 | #define HB_BUFFER_XALLOCATE_VAR(b, func, var) \ |
| 673 | b->func (offsetof (hb_glyph_info_t, var) - offsetof(hb_glyph_info_t, var1), \ |
| 674 | sizeof (b->info[0].var)) |
| 675 | #define HB_BUFFER_ALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, allocate_var, var ()) |
| 676 | #define HB_BUFFER_TRY_ALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, try_allocate_var, var ()) |
| 677 | #define HB_BUFFER_DEALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, deallocate_var, var ()) |
| 678 | #define HB_BUFFER_ASSERT_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, assert_var, var ()) |
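/* Sketch of typical use (assuming a glyph-info accessor macro over var1/var2,
 * such as the shapers' syllable ()): a shaper claims the bytes for the
 * duration of its pass and releases them afterwards:
 *
 *   HB_BUFFER_ALLOCATE_VAR (buffer, syllable);
 *   ... read/write info[i].syllable () ...
 *   HB_BUFFER_DEALLOCATE_VAR (buffer, syllable);
 */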
| 679 | |
| 680 | |
| 681 | #endif /* HB_BUFFER_HH */ |
| 682 | |