1 | #ifndef OT_LAYOUT_GPOS_MARKLIGPOSFORMAT1_HH |
2 | #define OT_LAYOUT_GPOS_MARKLIGPOSFORMAT1_HH |
3 | |
4 | #include "LigatureArray.hh" |
5 | |
6 | namespace OT { |
7 | namespace Layout { |
8 | namespace GPOS_impl { |
9 | |
10 | |
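/* MarkLigPos format 1: MarkToLigature attachment positioning.
 * Attaches a mark glyph to a specific component of a preceding ligature
 * glyph, using per-class mark anchors from the MarkArray and a
 * (component x class) anchor matrix per ligature in the LigatureArray. */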
11 | template <typename Types> |
12 | struct MarkLigPosFormat1_2 |
13 | { |
14 | protected: |
15 | HBUINT16 format; /* Format identifier--format = 1 */ |
16 | typename Types::template OffsetTo<Coverage> |
17 | markCoverage; /* Offset to Mark Coverage table--from |
18 | * beginning of MarkLigPos subtable */ |
19 | typename Types::template OffsetTo<Coverage> |
20 | ligatureCoverage; /* Offset to Ligature Coverage |
21 | * table--from beginning of MarkLigPos |
22 | * subtable */ |
23 | HBUINT16 classCount; /* Number of defined mark classes */ |
24 | typename Types::template OffsetTo<MarkArray> |
25 | markArray; /* Offset to MarkArray table--from |
26 | * beginning of MarkLigPos subtable */ |
27 | typename Types::template OffsetTo<LigatureArray> |
28 | ligatureArray; /* Offset to LigatureArray table--from |
29 | * beginning of MarkLigPos subtable */ |
30 | public: |
31 | DEFINE_SIZE_STATIC (4 + 4 * Types::size); |
32 | |
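  /* Checks the fixed-size header, then follows each offset; the
   * LigatureArray additionally needs classCount to bound-check its
   * per-ligature anchor matrices. */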
33 | bool sanitize (hb_sanitize_context_t *c) const |
34 | { |
35 | TRACE_SANITIZE (this); |
36 | return_trace (c->check_struct (this) && |
37 | markCoverage.sanitize (c, this) && |
38 | ligatureCoverage.sanitize (c, this) && |
39 | markArray.sanitize (c, this) && |
40 | ligatureArray.sanitize (c, this, (unsigned int) classCount)); |
41 | } |
42 | |
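  /* The subtable can only apply if both the mark coverage and the
   * ligature coverage intersect the glyph set. */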
43 | bool intersects (const hb_set_t *glyphs) const |
44 | { |
45 | return (this+markCoverage).intersects (glyphs) && |
46 | (this+ligatureCoverage).intersects (glyphs); |
47 | } |
48 | |
49 | void closure_lookups (hb_closure_lookups_context_t *c) const {} |
50 | |
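  /* Collects variation indices from the anchors of covered marks, then
   * from the anchor matrices of covered ligatures, restricted to the
   * mark classes that are actually used. */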
51 | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const |
52 | { |
53 | + hb_zip (this+markCoverage, this+markArray) |
54 | | hb_filter (c->glyph_set, hb_first) |
55 | | hb_map (hb_second) |
56 | | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); }) |
57 | ; |
58 | |
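    /* Remap mark classes to those used by the retained marks, then walk
     * each covered ligature's (component x class) anchor matrix. */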
59 | hb_map_t klass_mapping; |
60 | Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping); |
61 | |
62 | unsigned ligcount = (this+ligatureArray).len; |
63 | auto lig_iter = |
64 | + hb_zip (this+ligatureCoverage, hb_range (ligcount)) |
65 | | hb_filter (c->glyph_set, hb_first) |
66 | | hb_map (hb_second) |
67 | ; |
68 | |
69 | const LigatureArray& lig_array = this+ligatureArray; |
70 | for (const unsigned i : lig_iter) |
71 | { |
72 | hb_sorted_vector_t<unsigned> lig_indexes; |
73 | unsigned row_count = lig_array[i].rows; |
74 | for (unsigned row : + hb_range (row_count)) |
75 | { |
76 | + hb_range ((unsigned) classCount) |
77 | | hb_filter (klass_mapping) |
78 | | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; }) |
79 | | hb_sink (lig_indexes) |
80 | ; |
81 | } |
82 | |
83 | lig_array[i].collect_variation_indices (c, lig_indexes.iter ()); |
84 | } |
85 | } |
86 | |
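  /* Both covered marks and covered ligatures contribute input glyphs. */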
87 | void collect_glyphs (hb_collect_glyphs_context_t *c) const |
88 | { |
89 | if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return; |
90 | if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return; |
91 | } |
92 | |
93 | const Coverage &get_coverage () const { return this+markCoverage; } |
94 | |
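  /* Attaches the current mark to the nearest preceding non-mark glyph,
   * provided that glyph is in the ligature coverage, choosing the ligature
   * component from the buffer's ligature id/component bookkeeping. */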
95 | bool apply (hb_ot_apply_context_t *c) const |
96 | { |
97 | TRACE_APPLY (this); |
98 | hb_buffer_t *buffer = c->buffer; |
99 | unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint); |
100 | if (likely (mark_index == NOT_COVERED)) return_trace (false); |
101 | |
102 | /* Now we search backwards for a non-mark glyph */ |
103 | |
104 | hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input; |
105 | skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks); |
106 | |
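    /* last_base / last_base_until cache the previous backwards scan for a
     * non-mark glyph, so a run of marks over the same ligature does not
     * rescan the same stretch of the buffer. */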
107 | if (c->last_base_until > buffer->idx) |
108 | { |
109 | c->last_base_until = 0; |
110 | c->last_base = -1; |
111 | } |
112 | unsigned j; |
113 | for (j = buffer->idx; j > c->last_base_until; j--) |
114 | { |
115 | auto match = skippy_iter.match (buffer->info[j - 1]); |
116 | if (match == skippy_iter.MATCH) |
117 | { |
118 | c->last_base = (signed) j - 1; |
119 | break; |
120 | } |
121 | } |
122 | c->last_base_until = buffer->idx; |
123 | if (c->last_base == -1) |
124 | { |
125 | buffer->unsafe_to_concat_from_outbuffer (0, buffer->idx + 1); |
126 | return_trace (false); |
127 | } |
128 | |
129 | unsigned idx = (unsigned) c->last_base; |
130 | |
131 | /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */ |
132 | //if (!_hb_glyph_info_is_ligature (&buffer->info[idx])) { return_trace (false); } |
133 | |
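    /* idx is the nearest preceding non-mark glyph; it must be in the
     * ligature coverage for this subtable to attach anything to it. */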
134 | unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[idx].codepoint); |
135 | if (lig_index == NOT_COVERED) |
136 | { |
137 | buffer->unsafe_to_concat_from_outbuffer (idx, buffer->idx + 1); |
138 | return_trace (false); |
139 | } |
140 | |
141 | const LigatureArray& lig_array = this+ligatureArray; |
142 | const LigatureAttach& lig_attach = lig_array[lig_index]; |
143 | |
144 | /* Find component to attach to */ |
145 | unsigned int comp_count = lig_attach.rows; |
146 | if (unlikely (!comp_count)) |
147 | { |
148 | buffer->unsafe_to_concat_from_outbuffer (idx, buffer->idx + 1); |
149 | return_trace (false); |
150 | } |
151 | |
152 | /* We must now check whether the ligature ID of the current mark glyph |
153 | * is identical to the ligature ID of the found ligature. If yes, we |
154 | * can directly use the component index. If not, we attach the mark |
155 | * glyph to the last component of the ligature. */ |
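    /* For example, a mark originally typed after the second component of
     * the ligature carries lig_comp == 2, so it attaches to that
     * component's anchors rather than to the last component's. */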
156 | unsigned int comp_index; |
157 | unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[idx]); |
158 | unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur()); |
159 | unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); |
160 | if (lig_id && lig_id == mark_id && mark_comp > 0) |
      comp_index = hb_min (comp_count, mark_comp) - 1;
162 | else |
163 | comp_index = comp_count - 1; |
164 | |
165 | return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, idx)); |
166 | } |
167 | |
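  /* Subsetting: rebuilds both coverages against the retained glyph set,
   * remaps mark classes to the ones still referenced, and prunes the mark
   * and ligature arrays to match. */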
168 | bool subset (hb_subset_context_t *c) const |
169 | { |
170 | TRACE_SUBSET (this); |
171 | const hb_set_t &glyphset = *c->plan->glyphset_gsub (); |
172 | const hb_map_t &glyph_map = *c->plan->glyph_map; |
173 | |
174 | auto *out = c->serializer->start_embed (*this); |
175 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
176 | out->format = format; |
177 | |
178 | hb_map_t klass_mapping; |
179 | Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping); |
180 | |
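    /* If no mark class survives in the subset, the subtable would carry
     * no attachments; drop it. */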
181 | if (!klass_mapping.get_population ()) return_trace (false); |
182 | out->classCount = klass_mapping.get_population (); |
183 | |
184 | auto mark_iter = |
185 | + hb_zip (this+markCoverage, this+markArray) |
186 | | hb_filter (glyphset, hb_first) |
187 | ; |
188 | |
189 | auto new_mark_coverage = |
190 | + mark_iter |
191 | | hb_map_retains_sorting (hb_first) |
192 | | hb_map_retains_sorting (glyph_map) |
193 | ; |
194 | |
195 | if (!out->markCoverage.serialize_serialize (c->serializer, new_mark_coverage)) |
196 | return_trace (false); |
197 | |
198 | out->markArray.serialize_subset (c, markArray, this, |
199 | (this+markCoverage).iter (), |
200 | &klass_mapping); |
201 | |
202 | auto new_ligature_coverage = |
203 | + hb_iter (this + ligatureCoverage) |
204 | | hb_filter (glyphset) |
205 | | hb_map_retains_sorting (glyph_map) |
206 | ; |
207 | |
208 | if (!out->ligatureCoverage.serialize_serialize (c->serializer, new_ligature_coverage)) |
209 | return_trace (false); |
210 | |
211 | out->ligatureArray.serialize_subset (c, ligatureArray, this, |
212 | hb_iter (this+ligatureCoverage), classCount, &klass_mapping); |
213 | |
214 | return_trace (true); |
215 | } |
216 | |
217 | }; |
218 | |
219 | } |
220 | } |
221 | } |
222 | |
223 | #endif /* OT_LAYOUT_GPOS_MARKLIGPOSFORMAT1_HH */ |
224 | |