/*
 * Copyright © 2017 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_VAR_HVAR_TABLE_HH
#define HB_OT_VAR_HVAR_TABLE_HH

#include "hb-ot-layout-common.hh"


namespace OT {

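/* DeltaSetIndexMap maps a glyph index to a delta-set index in the item
 * variation store, returned as a packed 16.16 outer.inner pair.  Entries are
 * stored as fixed-width big-endian integers; the entry width and the number
 * of bits reserved for the inner index are encoded in the format field. */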
struct DeltaSetIndexMap
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  c->check_range (mapDataZ.arrayZ,
                                  mapCount,
                                  get_width ()));
  }

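  /* Serializes the mapping from a subset plan (index_map_subset_plan_t
   * below).  Each 16.16 outer.inner value is re-packed into `width' bytes
   * with the inner index in the low `inner_bit_count' bits.  Illustrative
   * example: with width = 2 and inner_bit_count = 4, the value 0x01230004
   * (outer 0x123, inner 4) packs to u = (0x123 << 4) | 4 = 0x1234 and is
   * written big-endian as the bytes 0x12, 0x34; format is set to
   * ((2-1) << 4) | (4-1) = 0x0013. */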
  template <typename T>
  bool serialize (hb_serialize_context_t *c, const T &plan)
  {
    unsigned int width = plan.get_width ();
    unsigned int inner_bit_count = plan.get_inner_bit_count ();
    const hb_array_t<const unsigned int> output_map = plan.get_output_map ();

    TRACE_SERIALIZE (this);
    if (unlikely (output_map.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0))))
      return_trace (false);
    if (unlikely (!c->extend_min (*this))) return_trace (false);

    format = ((width-1)<<4)|(inner_bit_count-1);
    mapCount = output_map.length;
    HBUINT8 *p = c->allocate_size<HBUINT8> (width * output_map.length);
    if (unlikely (!p)) return_trace (false);
    for (unsigned int i = 0; i < output_map.length; i++)
    {
      unsigned int v = output_map[i];
      unsigned int outer = v >> 16;
      unsigned int inner = v & 0xFFFF;
      unsigned int u = (outer << inner_bit_count) | inner;
      for (unsigned int w = width; w > 0;)
      {
        p[--w] = u;
        u >>= 8;
      }
      p += width;
    }
    return_trace (true);
  }

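  /* Looks up a glyph index and returns its delta-set index as 16.16
   * outer.inner.  Decoding the example above: the bytes 0x12, 0x34 are read
   * as u = 0x1234, which with an inner bit count of 4 splits into
   * outer = 0x123 and inner = 0x4, i.e. 0x01230004. */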
  unsigned int map (unsigned int v) const /* Returns 16.16 outer.inner. */
  {
    /* If count is zero, pass value unchanged. This takes
     * care of direct mapping for advance map. */
    if (!mapCount)
      return v;

    if (v >= mapCount)
      v = mapCount - 1;

    unsigned int u = 0;
    { /* Fetch it. */
      unsigned int w = get_width ();
      const HBUINT8 *p = mapDataZ.arrayZ + w * v;
      for (; w; w--)
        u = (u << 8) + *p++;
    }

    { /* Repack it. */
      unsigned int n = get_inner_bit_count ();
      unsigned int outer = u >> n;
      unsigned int inner = u & ((1 << n) - 1);
      u = (outer<<16) | inner;
    }

    return u;
  }

  unsigned int get_map_count () const       { return mapCount; }
  unsigned int get_width () const           { return ((format >> 4) & 3) + 1; }
  unsigned int get_inner_bit_count () const { return (format & 0xF) + 1; }

  protected:
  HBUINT16 format;            /* A packed field that describes the compressed
                               * representation of delta-set indices. */
  HBUINT16 mapCount;          /* The number of mapping entries. */
  UnsizedArrayOf<HBUINT8>
           mapDataZ;          /* The delta-set index mapping data. */

  public:
  DEFINE_SIZE_ARRAY (4, mapDataZ);
};

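/* Subsetting plan for a single DeltaSetIndexMap.  init() walks the retained
 * glyphs, records which outer/inner delta-set indices they use, and trims the
 * run of trailing entries that repeat the last map value; remap() then
 * rewrites each entry in terms of the remapped outer and inner indices. */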
struct index_map_subset_plan_t
{
  enum index_map_index_t {
    ADV_INDEX,
    LSB_INDEX,  /* dual as TSB */
    RSB_INDEX,  /* dual as BSB */
    VORG_INDEX
  };

  void init (const DeltaSetIndexMap &index_map,
             hb_inc_bimap_t &outer_map,
             hb_vector_t<hb_set_t *> &inner_sets,
             const hb_subset_plan_t *plan)
  {
    map_count = 0;
    outer_bit_count = 0;
    inner_bit_count = 1;
    max_inners.init ();
    output_map.init ();

    if (&index_map == &Null (DeltaSetIndexMap)) return;

    unsigned int last_val = (unsigned int)-1;
    hb_codepoint_t last_gid = (hb_codepoint_t)-1;
    hb_codepoint_t gid = (hb_codepoint_t) hb_min (index_map.get_map_count (), plan->num_output_glyphs ());

    outer_bit_count = (index_map.get_width () * 8) - index_map.get_inner_bit_count ();
    max_inners.resize (inner_sets.length);
    for (unsigned i = 0; i < inner_sets.length; i++) max_inners[i] = 0;

    /* Search backwards for the last map value that differs from its successor:
     * trailing glyphs that repeat the final stored entry can be dropped,
     * because map() clamps out-of-range indices to mapCount - 1 and so still
     * returns that same value for them. */
    for (; gid > 0; gid--)
    {
      hb_codepoint_t old_gid;
      if (!plan->old_gid_for_new_gid (gid - 1, &old_gid))
      {
        if (last_gid == (hb_codepoint_t) -1)
          continue;
        else
          break;
      }

      unsigned int v = index_map.map (old_gid);
      if (last_gid == (hb_codepoint_t) -1)
      {
        last_val = v;
        last_gid = gid;
        continue;
      }
      if (v != last_val) break;

      last_gid = gid;
    }

    if (unlikely (last_gid == (hb_codepoint_t)-1)) return;
    map_count = last_gid;
    for (gid = 0; gid < map_count; gid++)
    {
      hb_codepoint_t old_gid;
      if (plan->old_gid_for_new_gid (gid, &old_gid))
      {
        unsigned int v = index_map.map (old_gid);
        unsigned int outer = v >> 16;
        unsigned int inner = v & 0xFFFF;
        outer_map.add (outer);
        if (inner > max_inners[outer]) max_inners[outer] = inner;
        if (outer >= inner_sets.length) return;
        inner_sets[outer]->add (inner);
      }
    }
  }

  void fini ()
  {
    max_inners.fini ();
    output_map.fini ();
  }

  void remap (const DeltaSetIndexMap *input_map,
              const hb_inc_bimap_t &outer_map,
              const hb_vector_t<hb_inc_bimap_t> &inner_maps,
              const hb_subset_plan_t *plan)
  {
    if (input_map == &Null (DeltaSetIndexMap)) return;

    for (unsigned int i = 0; i < max_inners.length; i++)
    {
      if (inner_maps[i].get_population () == 0) continue;
      unsigned int bit_count = (max_inners[i]==0)? 1: hb_bit_storage (inner_maps[i][max_inners[i]]);
      if (bit_count > inner_bit_count) inner_bit_count = bit_count;
    }

    output_map.resize (map_count);
    for (hb_codepoint_t gid = 0; gid < output_map.length; gid++)
    {
      hb_codepoint_t old_gid;
      if (plan->old_gid_for_new_gid (gid, &old_gid))
      {
        unsigned int v = input_map->map (old_gid);
        unsigned int outer = v >> 16;
        output_map[gid] = (outer_map[outer] << 16) | (inner_maps[outer][v & 0xFFFF]);
      }
      else
        output_map[gid] = 0;  /* Map unused glyph to outer/inner=0/0 */
    }
  }

  unsigned int get_inner_bit_count () const { return inner_bit_count; }
  unsigned int get_width () const           { return ((outer_bit_count + inner_bit_count + 7) / 8); }
  unsigned int get_map_count () const       { return map_count; }

  unsigned int get_size () const
  { return (map_count? (DeltaSetIndexMap::min_size + get_width () * map_count): 0); }

  bool is_identity () const { return get_output_map ().length == 0; }
  hb_array_t<const unsigned int> get_output_map () const { return output_map.as_array (); }

  protected:
  unsigned int map_count;
  hb_vector_t<unsigned int> max_inners;
  unsigned int outer_bit_count;
  unsigned int inner_bit_count;
  hb_vector_t<unsigned int> output_map;
};

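/* Combined subsetting plan for HVAR/VVAR: one index_map_subset_plan_t per
 * delta-set index map (advance, lsb/tsb, rsb/bsb, plus vertical origin for
 * VVAR), together with the outer and per-sub-table inner remappings used to
 * subset the shared variation store. */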
struct hvarvvar_subset_plan_t
{
  hvarvvar_subset_plan_t () : inner_maps (), index_map_plans () {}
  ~hvarvvar_subset_plan_t () { fini (); }

  void init (const hb_array_t<const DeltaSetIndexMap *> &index_maps,
             const VariationStore &_var_store,
             const hb_subset_plan_t *plan)
  {
    index_map_plans.resize (index_maps.length);

    var_store = &_var_store;
    inner_sets.resize (var_store->get_sub_table_count ());
    for (unsigned int i = 0; i < inner_sets.length; i++)
      inner_sets[i] = hb_set_create ();
    adv_set = hb_set_create ();

    inner_maps.resize (var_store->get_sub_table_count ());

    for (unsigned int i = 0; i < inner_maps.length; i++)
      inner_maps[i].init ();

    if (unlikely (!index_map_plans.length || !inner_sets.length || !inner_maps.length)) return;

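    /* The advance map is special: if the font has none (the offset is 0,
     * i.e. the null DeltaSetIndexMap), glyph ids map directly to inner
     * indices of sub-table 0, so every retained old glyph id is collected
     * there.  With retain_gids the inner map below is built in gid order,
     * skipping absent gids, so that inner indices stay aligned with glyph
     * ids. */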
    bool retain_adv_map = false;
    index_map_plans[0].init (*index_maps[0], outer_map, inner_sets, plan);
    if (index_maps[0] == &Null (DeltaSetIndexMap))
    {
      retain_adv_map = plan->retain_gids;
      outer_map.add (0);
      for (hb_codepoint_t gid = 0; gid < plan->num_output_glyphs (); gid++)
      {
        hb_codepoint_t old_gid;
        if (plan->old_gid_for_new_gid (gid, &old_gid))
          inner_sets[0]->add (old_gid);
      }
      hb_set_union (adv_set, inner_sets[0]);
    }

    for (unsigned int i = 1; i < index_maps.length; i++)
      index_map_plans[i].init (*index_maps[i], outer_map, inner_sets, plan);

    outer_map.sort ();

    if (retain_adv_map)
    {
      for (hb_codepoint_t gid = 0; gid < plan->num_output_glyphs (); gid++)
        if (inner_sets[0]->has (gid))
          inner_maps[0].add (gid);
        else
          inner_maps[0].skip ();
    }
    else
    {
      inner_maps[0].add_set (adv_set);
      hb_set_subtract (inner_sets[0], adv_set);
      inner_maps[0].add_set (inner_sets[0]);
    }

    for (unsigned int i = 1; i < inner_maps.length; i++)
      inner_maps[i].add_set (inner_sets[i]);

    for (unsigned int i = 0; i < index_maps.length; i++)
      index_map_plans[i].remap (index_maps[i], outer_map, inner_maps, plan);
  }

  void fini ()
  {
    for (unsigned int i = 0; i < inner_sets.length; i++)
      hb_set_destroy (inner_sets[i]);
    hb_set_destroy (adv_set);
    inner_maps.fini_deep ();
    index_map_plans.fini_deep ();
  }

  hb_inc_bimap_t outer_map;
  hb_vector_t<hb_inc_bimap_t> inner_maps;
  hb_vector_t<index_map_subset_plan_t> index_map_plans;
  const VariationStore *var_store;

  protected:
  hb_vector_t<hb_set_t *> inner_sets;
  hb_set_t *adv_set;
};

/*
 * HVAR -- Horizontal Metrics Variations
 * https://docs.microsoft.com/en-us/typography/opentype/spec/hvar
 * VVAR -- Vertical Metrics Variations
 * https://docs.microsoft.com/en-us/typography/opentype/spec/vvar
 */
#define HB_OT_TAG_HVAR HB_TAG('H','V','A','R')
#define HB_OT_TAG_VVAR HB_TAG('V','V','A','R')

struct HVARVVAR
{
  static constexpr hb_tag_t HVARTag = HB_OT_TAG_HVAR;
  static constexpr hb_tag_t VVARTag = HB_OT_TAG_VVAR;

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
                  likely (version.major == 1) &&
                  varStore.sanitize (c, this) &&
                  advMap.sanitize (c, this) &&
                  lsbMap.sanitize (c, this) &&
                  rsbMap.sanitize (c, this));
  }

  void listup_index_maps (hb_vector_t<const DeltaSetIndexMap *> &index_maps) const
  {
    index_maps.push (&(this+advMap));
    index_maps.push (&(this+lsbMap));
    index_maps.push (&(this+rsbMap));
  }

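  /* Writes out the advance, lsb and rsb index maps.  A plan whose output map
   * is empty writes no sub-table: the offset stays 0, which for the advance
   * map falls back to DeltaSetIndexMap::map()'s direct pass-through of glyph
   * ids. */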
  bool serialize_index_maps (hb_serialize_context_t *c,
                             const hb_array_t<index_map_subset_plan_t> &im_plans)
  {
    TRACE_SERIALIZE (this);
    if (im_plans[index_map_subset_plan_t::ADV_INDEX].is_identity ())
      advMap = 0;
    else if (unlikely (!advMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::ADV_INDEX])))
      return_trace (false);
    if (im_plans[index_map_subset_plan_t::LSB_INDEX].is_identity ())
      lsbMap = 0;
    else if (unlikely (!lsbMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::LSB_INDEX])))
      return_trace (false);
    if (im_plans[index_map_subset_plan_t::RSB_INDEX].is_identity ())
      rsbMap = 0;
    else if (unlikely (!rsbMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::RSB_INDEX])))
      return_trace (false);

    return_trace (true);
  }

  template <typename T>
  bool _subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    hvarvvar_subset_plan_t hvar_plan;
    hb_vector_t<const DeltaSetIndexMap *> index_maps;

    ((T*)this)->listup_index_maps (index_maps);
    hvar_plan.init (index_maps.as_array (), this+varStore, c->plan);

    T *out = c->serializer->allocate_min<T> ();
    if (unlikely (!out)) return_trace (false);

    out->version.major = 1;
    out->version.minor = 0;

    if (unlikely (!out->varStore.serialize (c->serializer, out)
                  .serialize (c->serializer, hvar_plan.var_store, hvar_plan.inner_maps.as_array ())))
      return_trace (false);

    return_trace (out->T::serialize_index_maps (c->serializer,
                                                hvar_plan.index_map_plans.as_array ()));
  }

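  /* Runtime accessors: map the glyph id through the relevant index map (or
   * pass it through unchanged if the map is absent) to a delta-set index,
   * then query the variation store for the delta at the given normalized
   * coordinates.
   *
   * A minimal usage sketch (hypothetical caller; assumes the table has been
   * sanitized and `font' carries normalized variation coords):
   *
   *   const HVAR &hvar = ...;  // e.g. obtained from the face's table cache
   *   float adv_delta = hvar.get_advance_var (glyph, font);
   *   float lsb_delta = hvar.get_side_bearing_var (glyph, font->coords,
   *                                                font->num_coords);
   */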
  float get_advance_var (hb_codepoint_t glyph, hb_font_t *font) const
  {
    unsigned int varidx = (this+advMap).map (glyph);
    return (this+varStore).get_delta (varidx, font->coords, font->num_coords);
  }

  float get_side_bearing_var (hb_codepoint_t glyph,
                              const int *coords, unsigned int coord_count) const
  {
    if (!has_side_bearing_deltas ()) return 0.f;
    unsigned int varidx = (this+lsbMap).map (glyph);
    return (this+varStore).get_delta (varidx, coords, coord_count);
  }

  bool has_side_bearing_deltas () const { return lsbMap && rsbMap; }

  protected:
  FixedVersion<> version;     /* Version of the metrics variation table
                               * initially set to 0x00010000u */
  LOffsetTo<VariationStore>
                 varStore;    /* Offset to item variation store table. */
  LOffsetTo<DeltaSetIndexMap>
                 advMap;      /* Offset to advance var-idx mapping. */
  LOffsetTo<DeltaSetIndexMap>
                 lsbMap;      /* Offset to lsb/tsb var-idx mapping. */
  LOffsetTo<DeltaSetIndexMap>
                 rsbMap;      /* Offset to rsb/bsb var-idx mapping. */

  public:
  DEFINE_SIZE_STATIC (20);
};

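/* The concrete tables: HVAR uses the common layout as-is; VVAR appends the
 * optional vertical-origin index map. */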
struct HVAR : HVARVVAR {
  static constexpr hb_tag_t tableTag = HB_OT_TAG_HVAR;
  bool subset (hb_subset_context_t *c) const { return HVARVVAR::_subset<HVAR> (c); }
};
struct VVAR : HVARVVAR {
  static constexpr hb_tag_t tableTag = HB_OT_TAG_VVAR;

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (static_cast<const HVARVVAR *> (this)->sanitize (c) &&
                  vorgMap.sanitize (c, this));
  }

  void listup_index_maps (hb_vector_t<const DeltaSetIndexMap *> &index_maps) const
  {
    HVARVVAR::listup_index_maps (index_maps);
    index_maps.push (&(this+vorgMap));
  }

  bool serialize_index_maps (hb_serialize_context_t *c,
                             const hb_array_t<index_map_subset_plan_t> &im_plans)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!HVARVVAR::serialize_index_maps (c, im_plans)))
      return_trace (false);
    if (!im_plans[index_map_subset_plan_t::VORG_INDEX].get_map_count ())
      vorgMap = 0;
    else if (unlikely (!vorgMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::VORG_INDEX])))
      return_trace (false);

    return_trace (true);
  }

  bool subset (hb_subset_context_t *c) const { return HVARVVAR::_subset<VVAR> (c); }

  protected:
  LOffsetTo<DeltaSetIndexMap>
                 vorgMap;     /* Offset to vertical-origin var-idx mapping. */

  public:
  DEFINE_SIZE_STATIC (24);
};

} /* namespace OT */


#endif /* HB_OT_VAR_HVAR_TABLE_HH */