/src/harfbuzz/src/OT/Layout/GPOS/MarkMarkPosFormat1.hh
Line | Count | Source |
1 | | #ifndef OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH |
2 | | #define OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH |
3 | | |
4 | | #include "MarkArray.hh" |
5 | | |
6 | | namespace OT { |
7 | | namespace Layout { |
8 | | namespace GPOS_impl { |
9 | | |
10 | | typedef AnchorMatrix Mark2Array; /* mark2-major-- |
11 | | * in order of Mark2Coverage Index--, |
12 | | * mark1-minor-- |
13 | | * ordered by class--zero-based. */ |
14 | | |
15 | | template <typename Types> |
16 | | struct MarkMarkPosFormat1_2 |
17 | | { |
18 | | protected: |
19 | | HBUINT16 format; /* Format identifier--format = 1 */ |
20 | | typename Types::template OffsetTo<Coverage> |
21 | | mark1Coverage; /* Offset to Combining Mark1 Coverage |
22 | | * table--from beginning of MarkMarkPos |
23 | | * subtable */ |
24 | | typename Types::template OffsetTo<Coverage> |
25 | | mark2Coverage; /* Offset to Combining Mark2 Coverage |
26 | | * table--from beginning of MarkMarkPos |
27 | | * subtable */ |
28 | | HBUINT16 classCount; /* Number of defined mark classes */ |
29 | | typename Types::template OffsetTo<MarkArray> |
30 | | mark1Array; /* Offset to Mark1Array table--from |
31 | | * beginning of MarkMarkPos subtable */ |
32 | | typename Types::template OffsetTo<Mark2Array> |
33 | | mark2Array; /* Offset to Mark2Array table--from |
34 | | * beginning of MarkMarkPos subtable */ |
35 | | public: |
36 | | DEFINE_SIZE_STATIC (4 + 4 * Types::size); |
37 | | |
38 | | bool sanitize (hb_sanitize_context_t *c) const |
39 | 51 | { |
40 | 51 | TRACE_SANITIZE (this); |
41 | 51 | return_trace (c->check_struct (this) && |
42 | 51 | mark1Coverage.sanitize (c, this) && |
43 | 51 | mark2Coverage.sanitize (c, this) && |
44 | 51 | mark1Array.sanitize (c, this) && |
45 | 51 | hb_barrier () && |
46 | 51 | mark2Array.sanitize (c, this, (unsigned int) classCount)); |
47 | 51 | } |
48 | | |
49 | | bool intersects (const hb_set_t *glyphs) const |
50 | 0 | { |
51 | 0 | return (this+mark1Coverage).intersects (glyphs) && |
52 | 0 | (this+mark2Coverage).intersects (glyphs); |
53 | 0 | } |
54 | | |
55 | 0 | void closure_lookups (hb_closure_lookups_context_t *c) const {} |
56 | | |
57 | | void collect_variation_indices (hb_collect_variation_indices_context_t *c) const |
58 | 0 | { |
59 | 0 | + hb_zip (this+mark1Coverage, this+mark1Array) |
60 | 0 | | hb_filter (c->glyph_set, hb_first) |
61 | 0 | | hb_map (hb_second) |
62 | 0 | | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+mark1Array)); }) |
63 | 0 | ; |
64 | 0 |
65 | 0 | hb_map_t klass_mapping; |
66 | 0 | Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, *c->glyph_set, &klass_mapping); |
67 | 0 |
68 | 0 | unsigned mark2_count = (this+mark2Array).rows; |
69 | 0 | auto mark2_iter = |
70 | 0 | + hb_zip (this+mark2Coverage, hb_range (mark2_count)) |
71 | 0 | | hb_filter (c->glyph_set, hb_first) |
72 | 0 | | hb_map (hb_second) |
73 | 0 | ; |
74 | 0 |
75 | 0 | hb_sorted_vector_t<unsigned> mark2_indexes; |
76 | 0 | for (const unsigned row : mark2_iter) |
77 | 0 | { |
78 | 0 | + hb_range ((unsigned) classCount) |
79 | 0 | | hb_filter (klass_mapping) |
80 | 0 | | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; }) |
81 | 0 | | hb_sink (mark2_indexes) |
82 | 0 | ; |
83 | 0 | } |
84 | 0 | (this+mark2Array).collect_variation_indices (c, mark2_indexes.iter ()); |
85 | 0 | } |
86 | | |
87 | | void collect_glyphs (hb_collect_glyphs_context_t *c) const |
88 | 0 | { |
89 | 0 | if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return; |
90 | 0 | if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return; |
91 | 0 | } |
92 | | |
93 | 17 | const Coverage &get_coverage () const { return this+mark1Coverage; } |
94 | | |
95 | | bool apply (hb_ot_apply_context_t *c) const |
96 | 0 | { |
97 | 0 | TRACE_APPLY (this); |
98 | 0 | hb_buffer_t *buffer = c->buffer; |
99 | 0 | unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint); |
100 | 0 | if (likely (mark1_index == NOT_COVERED)) return_trace (false); |
101 | | |
102 | | /* Now we search backwards for a suitable mark glyph, stopping at the first non-mark glyph. */ |
103 | 0 | auto &skippy_iter = c->iter_input; |
104 | 0 | skippy_iter.reset_fast (buffer->idx); |
105 | 0 | skippy_iter.set_lookup_props (c->lookup_props & ~(uint32_t)LookupFlag::IgnoreFlags); |
106 | 0 | unsigned unsafe_from; |
107 | 0 | if (unlikely (!skippy_iter.prev (&unsafe_from))) |
108 | 0 | { |
109 | 0 | buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1); |
110 | 0 | return_trace (false); |
111 | 0 | } |
112 | | |
113 | 0 | if (likely (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]))) |
114 | 0 | { |
115 | 0 | buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1); |
116 | 0 | return_trace (false); |
117 | 0 | } |
118 | | |
119 | 0 | unsigned int j = skippy_iter.idx; |
120 | |
121 | 0 | unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur()); |
122 | 0 | unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]); |
123 | 0 | unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur()); |
124 | 0 | unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]); |
125 | |
126 | 0 | if (likely (id1 == id2)) |
127 | 0 | { |
128 | 0 | if (id1 == 0) /* Marks belonging to the same base. */ |
129 | 0 | goto good; |
130 | 0 | else if (comp1 == comp2) /* Marks belonging to the same ligature component. */ |
131 | 0 | goto good; |
132 | 0 | } |
133 | 0 | else |
134 | 0 | { |
135 | | /* If ligature ids don't match, it may be that one of the marks is |
136 | | * itself a ligature, in which case we match. */ |
137 | 0 | if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2)) |
138 | 0 | goto good; |
139 | 0 | } |
140 | | |
141 | | /* Didn't match. */ |
142 | 0 | buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1); |
143 | 0 | return_trace (false); |
144 | | |
145 | 0 | good: |
146 | 0 | unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint); |
147 | 0 | if (mark2_index == NOT_COVERED) |
148 | 0 | { |
149 | 0 | buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1); |
150 | 0 | return_trace (false); |
151 | 0 | } |
152 | | |
153 | 0 | return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j)); |
154 | 0 | } |
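The goto-based compatibility test inside apply() is easier to follow when restated as a predicate. The sketch below is a hypothetical standalone helper (not part of HarfBuzz) that captures the same rule: the two marks must belong to the same base, to the same component of the same ligature, or one of them must itself be a ligature.

  static bool marks_may_attach (unsigned id1, unsigned comp1,
                                unsigned id2, unsigned comp2)
  {
    if (id1 == id2)
      return id1 == 0          /* Both marks sit on the same (non-ligature) base. */
          || comp1 == comp2;   /* Both marks sit on the same ligature component. */

    /* Ligature ids differ: still compatible if one of the marks is itself
     * a ligature, i.e. its component number is zero. */
    return (id1 > 0 && !comp1) || (id2 > 0 && !comp2);
  }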
155 | | |
156 | | bool subset (hb_subset_context_t *c) const |
157 | 0 | { |
158 | 0 | TRACE_SUBSET (this); |
159 | 0 | const hb_set_t &glyphset = *c->plan->glyphset_gsub (); |
160 | 0 | const hb_map_t &glyph_map = *c->plan->glyph_map; |
161 | 0 |
162 | 0 | auto *out = c->serializer->start_embed (*this); |
163 | 0 | if (unlikely (!c->serializer->extend_min (out))) return_trace (false); |
164 | 0 | out->format = format; |
165 | 0 |
166 | 0 | hb_map_t klass_mapping; |
167 | 0 | Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping); |
168 | 0 |
169 | 0 | if (!klass_mapping.get_population ()) return_trace (false); |
170 | 0 | out->classCount = klass_mapping.get_population (); |
171 | 0 |
172 | 0 | auto mark1_iter = |
173 | 0 | + hb_zip (this+mark1Coverage, this+mark1Array) |
174 | 0 | | hb_filter (glyphset, hb_first) |
175 | 0 | ; |
176 | 0 |
177 | 0 | hb_sorted_vector_t<hb_codepoint_t> new_coverage; |
178 | 0 | + mark1_iter |
179 | 0 | | hb_map (hb_first) |
180 | 0 | | hb_map (glyph_map) |
181 | 0 | | hb_sink (new_coverage) |
182 | 0 | ; |
183 | 0 |
184 | 0 | if (!out->mark1Coverage.serialize_serialize (c->serializer, new_coverage.iter ())) |
185 | 0 | return_trace (false); |
186 | 0 |
187 | 0 | if (unlikely (!out->mark1Array.serialize_subset (c, mark1Array, this, |
188 | 0 | (this+mark1Coverage).iter (), |
189 | 0 | &klass_mapping))) |
190 | 0 | return_trace (false); |
191 | 0 |
192 | 0 | unsigned mark2count = (this+mark2Array).rows; |
193 | 0 | auto mark2_iter = |
194 | 0 | + hb_zip (this+mark2Coverage, hb_range (mark2count)) |
195 | 0 | | hb_filter (glyphset, hb_first) |
196 | 0 | ; |
197 | 0 |
198 | 0 | new_coverage.reset (); |
199 | 0 | hb_sorted_vector_t<unsigned> mark2_indexes; |
200 | 0 | auto &mark2_array = (this+mark2Array); |
201 | 0 | for (const auto _ : + mark2_iter) |
202 | 0 | { |
203 | 0 | unsigned row = _.second; |
204 | 0 |
205 | 0 | bool non_empty = + hb_range ((unsigned) classCount) |
206 | 0 | | hb_filter (klass_mapping) |
207 | 0 | | hb_map ([&] (const unsigned col) { return !mark2_array.offset_is_null (row, col, (unsigned) classCount); }) |
208 | 0 | | hb_any |
209 | 0 | ; |
210 | 0 |
211 | 0 | if (!non_empty) continue; |
212 | 0 |
213 | 0 | hb_codepoint_t new_g = glyph_map.get ( _.first); |
214 | 0 | new_coverage.push (new_g); |
215 | 0 |
216 | 0 | + hb_range ((unsigned) classCount) |
217 | 0 | | hb_filter (klass_mapping) |
218 | 0 | | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; }) |
219 | 0 | | hb_sink (mark2_indexes) |
220 | 0 | ; |
221 | 0 | } |
222 | 0 |
223 | 0 | if (!new_coverage) return_trace (false); |
224 | 0 | if (!out->mark2Coverage.serialize_serialize (c->serializer, new_coverage.iter ())) |
225 | 0 | return_trace (false); |
226 | 0 |
227 | 0 | return_trace (out->mark2Array.serialize_subset (c, mark2Array, this, |
228 | 0 | mark2_iter.len (), |
229 | 0 | mark2_indexes.iter ())); |
230 | 0 |
231 | 0 | } |
232 | | }; |
233 | | |
234 | | |
235 | | } |
236 | | } |
237 | | } |
238 | | |
239 | | #endif /* OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH */ |
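Only sanitize() and get_coverage() show non-zero execution counts in this report; apply() and subset() are never reached. One way to exercise apply() is to shape a string that stacks two combining marks on one base glyph, using a font whose GPOS table contains a MarkToMark (lookup type 6) rule covering those marks. The following is a minimal sketch against the public HarfBuzz C API; "font.ttf" is a placeholder path, and the lookup is only reached if the font actually positions these accents.

  #include <hb.h>

  int main (void)
  {
    hb_blob_t *blob = hb_blob_create_from_file ("font.ttf");
    hb_face_t *face = hb_face_create (blob, 0);
    hb_font_t *font = hb_font_create (face);

    hb_buffer_t *buf = hb_buffer_create ();
    /* U+0065 U+0301 U+0301: 'e' followed by two combining acute accents. */
    hb_buffer_add_utf8 (buf, "e\xCC\x81\xCC\x81", -1, 0, -1);
    hb_buffer_guess_segment_properties (buf);

    /* Shaping applies GPOS; a MarkToMark lookup covering the accents
     * ends up in MarkMarkPosFormat1_2::apply() above. */
    hb_shape (font, buf, NULL, 0);

    hb_buffer_destroy (buf);
    hb_font_destroy (font);
    hb_face_destroy (face);
    hb_blob_destroy (blob);
    return 0;
  }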