/src/harfbuzz/src/hb-ot-var-gvar-table.hh
Line | Count | Source |
1 | | /* |
2 | | * Copyright © 2019 Adobe Inc. |
3 | | * Copyright © 2019 Ebrahim Byagowi |
4 | | * |
5 | | * This is part of HarfBuzz, a text shaping library. |
6 | | * |
7 | | * Permission is hereby granted, without written agreement and without |
8 | | * license or royalty fees, to use, copy, modify, and distribute this |
9 | | * software and its documentation for any purpose, provided that the |
10 | | * above copyright notice and the following two paragraphs appear in |
11 | | * all copies of this software. |
12 | | * |
13 | | * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR |
14 | | * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES |
15 | | * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN |
16 | | * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH |
17 | | * DAMAGE. |
18 | | * |
19 | | * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, |
20 | | * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND |
21 | | * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS |
22 | | * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO |
23 | | * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. |
24 | | * |
25 | | * Adobe Author(s): Michiharu Ariza |
26 | | */ |
27 | | |
28 | | #ifndef HB_OT_VAR_GVAR_TABLE_HH |
29 | | #define HB_OT_VAR_GVAR_TABLE_HH |
30 | | |
31 | | #include "hb-decycler.hh" |
32 | | #include "hb-open-type.hh" |
33 | | #include "hb-ot-var-common.hh" |
34 | | |
35 | | /* |
36 | | * gvar -- Glyph Variation Table |
37 | | * https://docs.microsoft.com/en-us/typography/opentype/spec/gvar |
38 | | */ |
39 | | #define HB_OT_TAG_gvar HB_TAG('g','v','a','r') |
40 | | #define HB_OT_TAG_GVAR HB_TAG('G','V','A','R') |
41 | | |
42 | | struct hb_glyf_scratch_t |
43 | | { |
44 | | // glyf |
45 | | contour_point_vector_t all_points; |
46 | | contour_point_vector_t comp_points; |
47 | | hb_decycler_t decycler; |
48 | | |
49 | | // gvar |
50 | | contour_point_vector_t orig_points; |
51 | | hb_vector_t<int> x_deltas; |
52 | | hb_vector_t<int> y_deltas; |
53 | | contour_point_vector_t deltas; |
54 | | hb_vector_t<unsigned int> shared_indices; |
55 | | hb_vector_t<unsigned int> private_indices; |
56 | | }; |
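hb_glyf_scratch_t bundles every temporary buffer the glyf and gvar code paths need, so a single object can be reused across many glyphs and the vectors keep their heap capacity between calls. A minimal reuse sketch (the loop and draw_one_glyph are hypothetical, shown only to illustrate the amortization):

    // Hypothetical: one scratch object shared by a whole draw pass.
    // Each hb_vector_t inside is clear()ed, not freed, between glyphs,
    // so allocations amortize across the loop.
    hb_glyf_scratch_t scratch;
    for (hb_codepoint_t gid = 0; gid < num_glyphs; gid++)
      draw_one_glyph (font, gid, scratch);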
57 | | |
58 | | namespace OT { |
59 | | |
60 | | template <typename OffsetType> |
61 | | struct glyph_variations_t |
62 | | { |
63 | | // TODO: Move tuple_variations_t out of TupleVariationData |
64 | | using tuple_variations_t = typename TupleVariationData<OffsetType>::tuple_variations_t; |
65 | | using GlyphVariationData = TupleVariationData<OffsetType>; |
66 | | |
67 | | hb_vector_t<tuple_variations_t> glyph_variations; |
68 | | |
69 | | hb_vector_t<char> compiled_shared_tuples; |
70 | | private: |
71 | | unsigned shared_tuples_count = 0; |
72 | | |
73 | | /* shared coords -> index map after instantiation */ |
74 | | hb_hashmap_t<const hb_vector_t<char>*, unsigned> shared_tuples_idx_map; |
75 | | |
76 | | public: |
77 | | unsigned compiled_shared_tuples_count () const |
78 | 18 | { return shared_tuples_count; } |
79 | | |
80 | | unsigned compiled_byte_size () const |
81 | 18 | { |
82 | 18 | unsigned byte_size = 0; |
83 | 18 | for (const auto& _ : glyph_variations) |
84 | 18 | byte_size += _.get_compiled_byte_size (); |
85 | | |
86 | 18 | return byte_size; |
87 | 18 | } |
88 | | |
89 | | bool create_from_glyphs_var_data (unsigned axis_count, |
90 | | const hb_array_t<const F2DOT14> shared_tuples, |
91 | | const hb_subset_plan_t *plan, |
92 | | const hb_hashmap_t<hb_codepoint_t, hb_bytes_t>& new_gid_var_data_map) |
93 | 133 | { |
94 | 133 | if (unlikely (!glyph_variations.alloc_exact (plan->new_to_old_gid_list.length))) |
95 | 0 | return false; |
96 | | |
97 | 133 | auto it = hb_iter (plan->new_to_old_gid_list); |
98 | 133 | for (auto &_ : it) |
99 | 564 | { |
100 | 564 | hb_codepoint_t new_gid = _.first; |
101 | 564 | contour_point_vector_t *all_contour_points; |
102 | 564 | if (!new_gid_var_data_map.has (new_gid) || |
103 | 564 | !plan->new_gid_contour_points_map.has (new_gid, &all_contour_points)) |
104 | 0 | return false; |
105 | 564 | hb_bytes_t var_data = new_gid_var_data_map.get (new_gid); |
106 | | |
107 | 564 | const GlyphVariationData* p = reinterpret_cast<const GlyphVariationData*> (var_data.arrayZ); |
108 | 564 | typename GlyphVariationData::tuple_iterator_t iterator; |
109 | 564 | tuple_variations_t tuple_vars; |
110 | | |
111 | 564 | hb_vector_t<unsigned> shared_indices; |
112 | | |
113 | | /* If the variation data is empty, push an empty struct into the vector |
114 | | * to keep it in sync with new_to_old_gid_list. */ |
115 | 564 | if (!var_data || ! p->has_data () || !all_contour_points->length || |
116 | 564 | !GlyphVariationData::get_tuple_iterator (var_data, axis_count, |
117 | 109 | var_data.arrayZ, |
118 | 109 | shared_indices, &iterator)) |
119 | 488 | { |
120 | 488 | glyph_variations.push (std::move (tuple_vars)); |
121 | 488 | continue; |
122 | 488 | } |
123 | | |
124 | 76 | bool is_composite_glyph = false; |
125 | 76 | is_composite_glyph = plan->composite_new_gids.has (new_gid); |
126 | | |
127 | 76 | if (!p->decompile_tuple_variations (all_contour_points->length, true /* is_gvar */, |
128 | 76 | iterator, &(plan->axes_old_index_tag_map), |
129 | 76 | shared_indices, shared_tuples, |
130 | 76 | tuple_vars, /* OUT */ |
131 | 76 | is_composite_glyph)) |
132 | 54 | return false; |
133 | 22 | glyph_variations.push (std::move (tuple_vars)); |
134 | 22 | } |
135 | 79 | return !glyph_variations.in_error () && glyph_variations.length == plan->new_to_old_gid_list.length; |
136 | 133 | } |
137 | | |
138 | | bool instantiate (const hb_subset_plan_t *plan) |
139 | 79 | { |
140 | 79 | unsigned count = plan->new_to_old_gid_list.length; |
141 | 79 | bool iup_optimize = false; |
142 | 79 | iup_optimize = plan->flags & HB_SUBSET_FLAGS_OPTIMIZE_IUP_DELTAS; |
143 | 580 | for (unsigned i = 0; i < count; i++) |
144 | 501 | { |
145 | 501 | hb_codepoint_t new_gid = plan->new_to_old_gid_list[i].first; |
146 | 501 | contour_point_vector_t *all_points; |
147 | 501 | if (!plan->new_gid_contour_points_map.has (new_gid, &all_points)) |
148 | 0 | return false; |
149 | 501 | if (!glyph_variations[i].instantiate (plan->axes_location, plan->axes_triple_distances, all_points, iup_optimize)) |
150 | 0 | return false; |
151 | 501 | } |
152 | 79 | return true; |
153 | 79 | } |
154 | | |
155 | | bool compile_bytes (const hb_map_t& axes_index_map, |
156 | | const hb_map_t& axes_old_index_tag_map) |
157 | 79 | { |
158 | 79 | if (!compile_shared_tuples (axes_index_map, axes_old_index_tag_map)) |
159 | 61 | return false; |
160 | 18 | for (tuple_variations_t& vars: glyph_variations) |
161 | 18 | if (!vars.compile_bytes (axes_index_map, axes_old_index_tag_map, |
162 | 18 | true, /* use shared points */ |
163 | 18 | true, |
164 | 18 | &shared_tuples_idx_map)) |
165 | 0 | return false; |
166 | | |
167 | 18 | return true; |
168 | 18 | } |
169 | | |
170 | | bool compile_shared_tuples (const hb_map_t& axes_index_map, |
171 | | const hb_map_t& axes_old_index_tag_map) |
172 | 79 | { |
173 | | /* The key is a pointer to each tuple's compiled_peak_coords; the hashing |
174 | | * function always dereferences the pointer first. */ |
175 | 79 | hb_hashmap_t<const hb_vector_t<char>*, unsigned> coords_count_map; |
176 | | |
177 | | /* count the number of shared coords */ |
178 | 79 | for (tuple_variations_t& vars: glyph_variations) |
179 | 501 | { |
180 | 501 | for (tuple_delta_t& var : vars.tuple_vars) |
181 | 18 | { |
182 | 18 | if (!var.compile_peak_coords (axes_index_map, axes_old_index_tag_map)) |
183 | 0 | return false; |
184 | 18 | unsigned* count; |
185 | 18 | if (coords_count_map.has (&(var.compiled_peak_coords), &count)) |
186 | 0 | coords_count_map.set (&(var.compiled_peak_coords), *count + 1); |
187 | 18 | else |
188 | 18 | coords_count_map.set (&(var.compiled_peak_coords), 1); |
189 | 18 | } |
190 | 501 | } |
191 | | |
192 | 79 | if (!coords_count_map || coords_count_map.in_error ()) |
193 | 61 | return false; |
194 | | |
195 | | /* add only the coords that are used more than once to the vector, then sort */ |
196 | 18 | hb_vector_t<const hb_vector_t<char>*> shared_coords; |
197 | 18 | if (unlikely (!shared_coords.alloc (coords_count_map.get_population ()))) |
198 | 0 | return false; |
199 | | |
200 | 18 | for (const auto _ : coords_count_map.iter ()) |
201 | 18 | { |
202 | 18 | if (_.second == 1) continue; |
203 | 0 | shared_coords.push (_.first); |
204 | 0 | } |
205 | | |
206 | | /* no shared tuples: no coords are used more than once */ |
207 | 18 | if (!shared_coords) return true; |
208 | | /* sort by coord frequency first (high to low), then compare |
209 | | * the coord bytes */ |
210 | 0 | hb_qsort (shared_coords.arrayZ, shared_coords.length, sizeof (hb_vector_t<char>*), _cmp_coords, (void *) (&coords_count_map)); |
211 | | |
212 | | /* build shared_coords->idx map and shared tuples byte array */ |
213 | |
214 | 0 | shared_tuples_count = hb_min (0xFFFu + 1, shared_coords.length); |
215 | 0 | unsigned len = shared_tuples_count * (shared_coords[0]->length); |
216 | 0 | if (unlikely (!compiled_shared_tuples.alloc (len))) |
217 | 0 | return false; |
218 | | |
219 | 0 | for (unsigned i = 0; i < shared_tuples_count; i++) |
220 | 0 | { |
221 | 0 | shared_tuples_idx_map.set (shared_coords[i], i); |
222 | | /* add a concat() in hb_vector_t? */ |
223 | 0 | for (char c : shared_coords[i]->iter ()) |
224 | 0 | compiled_shared_tuples.push (c); |
225 | 0 | } |
226 | |
227 | 0 | return true; |
228 | 0 | } |
229 | | |
230 | | static int _cmp_coords (const void *pa, const void *pb, void *arg) |
231 | 0 | { |
232 | 0 | const hb_hashmap_t<const hb_vector_t<char>*, unsigned>* coords_count_map = |
233 | 0 | reinterpret_cast<const hb_hashmap_t<const hb_vector_t<char>*, unsigned>*> (arg); |
234 | | |
235 | | /* shared_coords is hb_vector_t<const hb_vector_t<char>*>, so cast pa/pb |
236 | | * to pointer-to-pointer */ |
237 | 0 | const hb_vector_t<char>** a = reinterpret_cast<const hb_vector_t<char>**> (const_cast<void*>(pa)); |
238 | 0 | const hb_vector_t<char>** b = reinterpret_cast<const hb_vector_t<char>**> (const_cast<void*>(pb)); |
239 | |
240 | 0 | bool has_a = coords_count_map->has (*a); |
241 | 0 | bool has_b = coords_count_map->has (*b); |
242 | |
243 | 0 | if (has_a && has_b) |
244 | 0 | { |
245 | 0 | unsigned a_num = coords_count_map->get (*a); |
246 | 0 | unsigned b_num = coords_count_map->get (*b); |
247 | |
248 | 0 | if (a_num != b_num) |
249 | 0 | return b_num - a_num; |
250 | | |
251 | 0 | return (*b)->as_array().cmp ((*a)->as_array ()); |
252 | 0 | } |
253 | 0 | else if (has_a) return -1; |
254 | 0 | else if (has_b) return 1; |
255 | 0 | else return 0; |
256 | 0 | } |
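For readers more used to the STL, _cmp_coords is roughly the following std::sort predicate. This is an illustrative sketch only; the real code keeps hb_qsort so it can thread the count map through the context pointer:

    // Rough std::sort equivalent of _cmp_coords (sketch, not the real code;
    // needs <algorithm>). Sorts shared_coords by frequency descending, then
    // by the coord bytes.
    std::sort (shared_coords.arrayZ,
               shared_coords.arrayZ + shared_coords.length,
               [&coords_count_map] (const hb_vector_t<char> *a,
                                    const hb_vector_t<char> *b)
               {
                 unsigned a_num = coords_count_map.get (a);
                 unsigned b_num = coords_count_map.get (b);
                 if (a_num != b_num) return a_num > b_num;       /* higher frequency first */
                 return b->as_array ().cmp (a->as_array ()) < 0; /* then byte order */
               });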
257 | | |
258 | | template<typename Iterator, |
259 | | hb_requires (hb_is_iterator (Iterator))> |
260 | | bool serialize_glyph_var_data (hb_serialize_context_t *c, |
261 | | Iterator it, |
262 | | bool long_offset, |
263 | | unsigned num_glyphs, |
264 | | char* glyph_var_data_offsets /* OUT: glyph var data offsets array */) const |
265 | 18 | { |
266 | 18 | TRACE_SERIALIZE (this); |
267 | | |
268 | 18 | if (long_offset) |
269 | 11 | { |
270 | 11 | ((HBUINT32 *) glyph_var_data_offsets)[0] = 0; |
271 | 11 | glyph_var_data_offsets += 4; |
272 | 11 | } |
273 | 7 | else |
274 | 7 | { |
275 | 7 | ((HBUINT16 *) glyph_var_data_offsets)[0] = 0; |
276 | 7 | glyph_var_data_offsets += 2; |
277 | 7 | } |
278 | 18 | unsigned glyph_offset = 0; |
279 | 18 | hb_codepoint_t last_gid = 0; |
280 | 18 | unsigned idx = 0; |
281 | | |
282 | 18 | GlyphVariationData* cur_glyph = c->start_embed<GlyphVariationData> (); |
283 | 18 | if (!cur_glyph) return_trace (false); |
284 | 18 | for (auto &_ : it) |
285 | 18 | { |
286 | 18 | hb_codepoint_t gid = _.first; |
287 | 18 | if (long_offset) |
288 | 11 | for (; last_gid < gid; last_gid++) |
289 | 0 | ((HBUINT32 *) glyph_var_data_offsets)[last_gid] = glyph_offset; |
290 | 7 | else |
291 | 7 | for (; last_gid < gid; last_gid++) |
292 | 0 | ((HBUINT16 *) glyph_var_data_offsets)[last_gid] = glyph_offset / 2; |
293 | | |
294 | 18 | if (idx >= glyph_variations.length) return_trace (false); |
295 | 18 | if (!cur_glyph->serialize (c, true, glyph_variations[idx])) return_trace (false); |
296 | 18 | GlyphVariationData* next_glyph = c->start_embed<GlyphVariationData> (); |
297 | 18 | glyph_offset += (char *) next_glyph - (char *) cur_glyph; |
298 | | |
299 | 18 | if (long_offset) |
300 | 11 | ((HBUINT32 *) glyph_var_data_offsets)[gid] = glyph_offset; |
301 | 7 | else |
302 | 7 | ((HBUINT16 *) glyph_var_data_offsets)[gid] = glyph_offset / 2; |
303 | | |
304 | 18 | last_gid++; |
305 | 18 | idx++; |
306 | 18 | cur_glyph = next_glyph; |
307 | 18 | } |
308 | | |
309 | 18 | if (long_offset) |
310 | 11 | for (; last_gid < num_glyphs; last_gid++) |
311 | 0 | ((HBUINT32 *) glyph_var_data_offsets)[last_gid] = glyph_offset; |
312 | 7 | else |
313 | 7 | for (; last_gid < num_glyphs; last_gid++) |
314 | 0 | ((HBUINT16 *) glyph_var_data_offsets)[last_gid] = glyph_offset / 2; |
315 | 18 | return_trace (true); |
316 | 18 | } |
317 | | }; |
318 | | |
319 | | template <typename GidOffsetType, unsigned TableTag> |
320 | | struct gvar_GVAR |
321 | | { |
322 | | static constexpr hb_tag_t tableTag = TableTag; |
323 | | |
324 | | using GlyphVariationData = TupleVariationData<GidOffsetType>; |
325 | | |
326 | 1.45M | bool has_data () const { return version.to_int () != 0; } |
327 | | |
328 | | bool sanitize_shallow (hb_sanitize_context_t *c) const |
329 | 5.35k | { |
330 | 5.35k | TRACE_SANITIZE (this); |
331 | 5.35k | return_trace (c->check_struct (this) && |
332 | 5.35k | hb_barrier () && |
333 | 5.35k | (version.major == 1) && |
334 | 5.35k | sharedTuples.sanitize (c, this, axisCount * sharedTupleCount) && |
335 | 5.35k | (is_long_offset () ? |
336 | 5.35k | c->check_array (get_long_offset_array (), c->get_num_glyphs () + 1) : |
337 | 5.35k | c->check_array (get_short_offset_array (), c->get_num_glyphs () + 1))); |
338 | 5.35k | } |
339 | | |
340 | | /* GlyphVariationData is not sanitized here; it must be checked when accessing each glyph's variation data */ |
341 | | bool sanitize (hb_sanitize_context_t *c) const |
342 | 5.35k | { return sanitize_shallow (c); } |
343 | | |
344 | | bool decompile_glyph_variations (hb_subset_context_t *c, |
345 | | glyph_variations_t<GidOffsetType>& glyph_vars /* OUT */) const |
346 | 133 | { |
347 | 133 | hb_hashmap_t<hb_codepoint_t, hb_bytes_t> new_gid_var_data_map; |
348 | 133 | auto it = hb_iter (c->plan->new_to_old_gid_list); |
349 | 133 | if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE)) |
350 | 21 | { |
351 | 21 | new_gid_var_data_map.set (0, hb_bytes_t ()); |
352 | 21 | it++; |
353 | 21 | } |
354 | | |
355 | 133 | for (auto &_ : it) |
356 | 561 | { |
357 | 561 | hb_codepoint_t new_gid = _.first; |
358 | 561 | hb_codepoint_t old_gid = _.second; |
359 | 561 | hb_bytes_t var_data_bytes = get_glyph_var_data_bytes (c->source_blob, glyphCountX, old_gid); |
360 | 561 | new_gid_var_data_map.set (new_gid, var_data_bytes); |
361 | 561 | } |
362 | | |
363 | 133 | if (new_gid_var_data_map.in_error ()) return false; |
364 | | |
365 | 133 | hb_array_t<const F2DOT14> shared_tuples = (this+sharedTuples).as_array ((unsigned) sharedTupleCount * (unsigned) axisCount); |
366 | 133 | return glyph_vars.create_from_glyphs_var_data (axisCount, shared_tuples, c->plan, new_gid_var_data_map); |
367 | 133 | } |
368 | | |
369 | | template<typename Iterator, |
370 | | hb_requires (hb_is_iterator (Iterator))> |
371 | | bool serialize (hb_serialize_context_t *c, |
372 | | const glyph_variations_t<GidOffsetType>& glyph_vars, |
373 | | Iterator it, |
374 | | unsigned axis_count, |
375 | | unsigned num_glyphs, |
376 | | bool force_long_offsets) const |
377 | 18 | { |
378 | 18 | TRACE_SERIALIZE (this); |
379 | 18 | gvar_GVAR *out = c->allocate_min<gvar_GVAR> (); |
380 | 18 | if (unlikely (!out)) return_trace (false); |
381 | | |
382 | 18 | out->version.major = 1; |
383 | 18 | out->version.minor = 0; |
384 | 18 | out->axisCount = axis_count; |
385 | 18 | out->glyphCountX = hb_min (0xFFFFu, num_glyphs); |
386 | | |
387 | 18 | unsigned glyph_var_data_size = glyph_vars.compiled_byte_size (); |
388 | | /* According to the spec: If the short format (Offset16) is used for offsets, |
389 | | * the value stored is the offset divided by 2, so the maximum data size should |
390 | | * be 2 * 0xFFFFu, which is 0x1FFFEu */ |
391 | 18 | bool long_offset = glyph_var_data_size > 0x1FFFEu || force_long_offsets; |
392 | 18 | out->flags = long_offset ? 1 : 0; |
393 | | |
394 | 18 | HBUINT8 *glyph_var_data_offsets = c->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false); |
395 | 18 | if (!glyph_var_data_offsets) return_trace (false); |
396 | | |
397 | | /* shared tuples */ |
398 | 18 | unsigned shared_tuple_count = glyph_vars.compiled_shared_tuples_count (); |
399 | 18 | out->sharedTupleCount = shared_tuple_count; |
400 | | |
401 | 18 | if (!shared_tuple_count) |
402 | 18 | out->sharedTuples = 0; |
403 | 0 | else |
404 | 0 | { |
405 | 0 | hb_array_t<const char> shared_tuples = glyph_vars.compiled_shared_tuples.as_array ().copy (c); |
406 | 0 | if (!shared_tuples.arrayZ) return_trace (false); |
407 | 0 | out->sharedTuples = shared_tuples.arrayZ - (char *) out; |
408 | 0 | } |
409 | | |
410 | 18 | char *glyph_var_data = c->start_embed<char> (); |
411 | 18 | if (!glyph_var_data) return_trace (false); |
412 | 18 | out->dataZ = glyph_var_data - (char *) out; |
413 | | |
414 | 18 | return_trace (glyph_vars.serialize_glyph_var_data (c, it, long_offset, num_glyphs, |
415 | 18 | (char *) glyph_var_data_offsets)); |
416 | 18 | } |
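The long_offset choice above is pure arithmetic from the spec rule quoted in the comment: Offset16 stores offset/2, so the largest encodable offset is 0xFFFF * 2 = 0x1FFFE bytes. A minimal sketch of that check:

    // Sketch: when must gvar fall back to 32-bit offsets?
    static bool needs_long_offsets (unsigned glyph_var_data_size)
    {
      /* Offset16 stores offset/2; 0xFFFF thus encodes at most 0x1FFFE bytes. */
      return glyph_var_data_size > 0x1FFFEu;
    }
    /* needs_long_offsets (0x1FFFE) == false, needs_long_offsets (0x1FFFF) == true */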
417 | | |
418 | | bool instantiate (hb_subset_context_t *c) const |
419 | 133 | { |
420 | 133 | TRACE_SUBSET (this); |
421 | 133 | glyph_variations_t<GidOffsetType> glyph_vars; |
422 | 133 | if (!decompile_glyph_variations (c, glyph_vars)) |
423 | 54 | return_trace (false); |
424 | | |
425 | 79 | if (!glyph_vars.instantiate (c->plan)) return_trace (false); |
426 | 79 | if (!glyph_vars.compile_bytes (c->plan->axes_index_map, c->plan->axes_old_index_tag_map)) |
427 | 61 | return_trace (false); |
428 | | |
429 | 18 | unsigned axis_count = c->plan->axes_index_map.get_population (); |
430 | 18 | unsigned num_glyphs = c->plan->num_output_glyphs (); |
431 | 18 | auto it = hb_iter (c->plan->new_to_old_gid_list); |
432 | | |
433 | 18 | bool force_long_offsets = false; |
434 | 18 | #ifdef HB_EXPERIMENTAL_API |
435 | 18 | force_long_offsets = c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS; |
436 | 18 | #endif |
437 | 18 | return_trace (serialize (c->serializer, glyph_vars, it, axis_count, num_glyphs, force_long_offsets)); |
438 | 79 | } |
439 | | |
440 | | bool subset (hb_subset_context_t *c) const |
441 | 674 | { |
442 | 674 | TRACE_SUBSET (this); |
443 | 674 | if (c->plan->all_axes_pinned) |
444 | 0 | return_trace (false); |
445 | | |
446 | 674 | if (c->plan->normalized_coords) |
447 | 133 | return_trace (instantiate (c)); |
448 | | |
449 | 541 | unsigned glyph_count = version.to_int () ? c->plan->source->get_num_glyphs () : 0; |
450 | | |
451 | 541 | gvar_GVAR *out = c->serializer->allocate_min<gvar_GVAR> (); |
452 | 541 | if (unlikely (!out)) return_trace (false); |
453 | | |
454 | 541 | out->version.major = 1; |
455 | 541 | out->version.minor = 0; |
456 | 541 | out->axisCount = axisCount; |
457 | 541 | out->sharedTupleCount = sharedTupleCount; |
458 | | |
459 | 541 | unsigned int num_glyphs = c->plan->num_output_glyphs (); |
460 | 541 | out->glyphCountX = hb_min (0xFFFFu, num_glyphs); |
461 | | |
462 | 541 | auto it = hb_iter (c->plan->new_to_old_gid_list); |
463 | 541 | if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE)) |
464 | 466 | it++; |
465 | 541 | unsigned int subset_data_size = 0; |
466 | 541 | for (auto &_ : it) |
467 | 352k | { |
468 | 352k | hb_codepoint_t old_gid = _.second; |
469 | 352k | subset_data_size += get_glyph_var_data_bytes (c->source_blob, glyph_count, old_gid).length; |
470 | 352k | } |
471 | | |
472 | | /* According to the spec: If the short format (Offset16) is used for offsets, |
473 | | * the value stored is the offset divided by 2, so the maximum data size should |
474 | | * be 2 * 0xFFFFu, which is 0x1FFFEu */ |
475 | 541 | bool long_offset = subset_data_size > 0x1FFFEu; |
476 | 541 | #ifdef HB_EXPERIMENTAL_API |
477 | 541 | long_offset = long_offset || (c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS); |
478 | 541 | #endif |
479 | 541 | out->flags = long_offset ? 1 : 0; |
480 | | |
481 | 541 | HBUINT8 *subset_offsets = c->serializer->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false); |
482 | 541 | if (!subset_offsets) return_trace (false); |
483 | | |
484 | | /* shared tuples */ |
485 | 541 | if (!sharedTupleCount || !sharedTuples) |
486 | 111 | out->sharedTuples = 0; |
487 | 430 | else |
488 | 430 | { |
489 | 430 | unsigned int shared_tuple_size = F2DOT14::static_size * axisCount * sharedTupleCount; |
490 | 430 | F2DOT14 *tuples = c->serializer->allocate_size<F2DOT14> (shared_tuple_size); |
491 | 430 | if (!tuples) return_trace (false); |
492 | 428 | out->sharedTuples = (char *) tuples - (char *) out; |
493 | 428 | hb_memcpy (tuples, this+sharedTuples, shared_tuple_size); |
494 | 428 | } |
495 | | |
496 | | /* This ordering relative to the shared tuples array, which puts the glyphVariationData |
497 | | last in the table, is required when HB_SUBSET_FLAGS_IFTB_REQUIREMENTS is set */ |
498 | 539 | char *subset_data = c->serializer->allocate_size<char> (subset_data_size, false); |
499 | 539 | if (!subset_data) return_trace (false); |
500 | 354 | out->dataZ = subset_data - (char *) out; |
501 | | |
502 | | |
503 | 354 | if (long_offset) |
504 | 64 | { |
505 | 64 | ((HBUINT32 *) subset_offsets)[0] = 0; |
506 | 64 | subset_offsets += 4; |
507 | 64 | } |
508 | 290 | else |
509 | 290 | { |
510 | 290 | ((HBUINT16 *) subset_offsets)[0] = 0; |
511 | 290 | subset_offsets += 2; |
512 | 290 | } |
513 | 354 | unsigned int glyph_offset = 0; |
514 | | |
515 | 354 | hb_codepoint_t last = 0; |
516 | 354 | it = hb_iter (c->plan->new_to_old_gid_list); |
517 | 354 | if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE)) |
518 | 307 | it++; |
519 | 354 | for (auto &_ : it) |
520 | 33.3k | { |
521 | 33.3k | hb_codepoint_t gid = _.first; |
522 | 33.3k | hb_codepoint_t old_gid = _.second; |
523 | | |
524 | 33.3k | if (long_offset) |
525 | 187k | for (; last < gid; last++) |
526 | 156k | ((HBUINT32 *) subset_offsets)[last] = glyph_offset; |
527 | 2.03k | else |
528 | 39.0k | for (; last < gid; last++) |
529 | 36.9k | ((HBUINT16 *) subset_offsets)[last] = glyph_offset / 2; |
530 | | |
531 | 33.3k | hb_bytes_t var_data_bytes = get_glyph_var_data_bytes (c->source_blob, |
532 | 33.3k | glyph_count, |
533 | 33.3k | old_gid); |
534 | | |
535 | 33.3k | hb_memcpy (subset_data, var_data_bytes.arrayZ, var_data_bytes.length); |
536 | 33.3k | subset_data += var_data_bytes.length; |
537 | 33.3k | glyph_offset += var_data_bytes.length; |
538 | | |
539 | 33.3k | if (long_offset) |
540 | 31.2k | ((HBUINT32 *) subset_offsets)[gid] = glyph_offset; |
541 | 2.03k | else |
542 | 2.03k | ((HBUINT16 *) subset_offsets)[gid] = glyph_offset / 2; |
543 | | |
544 | 33.3k | last++; // Skip over gid |
545 | 33.3k | } |
546 | | |
547 | 354 | if (long_offset) |
548 | 80 | for (; last < num_glyphs; last++) |
549 | 16 | ((HBUINT32 *) subset_offsets)[last] = glyph_offset; |
550 | 290 | else |
551 | 439 | for (; last < num_glyphs; last++) |
552 | 149 | ((HBUINT16 *) subset_offsets)[last] = glyph_offset / 2; |
553 | | |
554 | 354 | return_trace (true); |
555 | 539 | } |
556 | | |
557 | | protected: |
558 | | const hb_bytes_t get_glyph_var_data_bytes (hb_blob_t *blob, |
559 | | unsigned glyph_count, |
560 | | hb_codepoint_t glyph) const |
561 | 792k | { |
562 | 792k | unsigned start_offset = get_offset (glyph_count, glyph); |
563 | 792k | unsigned end_offset = get_offset (glyph_count, glyph+1); |
564 | 792k | if (unlikely (end_offset < start_offset)) return hb_bytes_t (); |
565 | 539k | unsigned length = end_offset - start_offset; |
566 | 539k | hb_bytes_t var_data = blob->as_bytes ().sub_array (((unsigned) dataZ) + start_offset, length); |
567 | 539k | return likely (var_data.length >= GlyphVariationData::min_size) ? var_data : hb_bytes_t (); |
568 | 792k | } |
569 | | |
570 | 1.58M | bool is_long_offset () const { return flags & 1; } |
571 | | |
572 | | unsigned get_offset (unsigned glyph_count, unsigned i) const |
573 | 1.58M | { |
574 | 1.58M | if (unlikely (i > glyph_count)) return 0; |
575 | 1.58M | hb_barrier (); |
576 | 1.58M | return is_long_offset () ? get_long_offset_array ()[i] : get_short_offset_array ()[i] * 2; |
577 | 1.58M | } |
578 | | |
579 | 519k | const HBUINT32 * get_long_offset_array () const { return (const HBUINT32 *) &offsetZ; } |
580 | 1.06M | const HBUINT16 *get_short_offset_array () const { return (const HBUINT16 *) &offsetZ; } |
581 | | |
582 | | public: |
583 | | struct accelerator_t |
584 | | { |
585 | | |
586 | | hb_scalar_cache_t *create_cache () const |
587 | 4.50k | { |
588 | 4.50k | return hb_scalar_cache_t::create (table->sharedTupleCount); |
589 | 4.50k | } |
590 | | |
591 | | static void destroy_cache (hb_scalar_cache_t *cache) |
592 | 4.50k | { |
593 | 4.50k | hb_scalar_cache_t::destroy (cache); |
594 | 4.50k | } |
595 | | |
596 | 1.45M | bool has_data () const { return table->has_data (); } |
597 | | |
598 | | accelerator_t (hb_face_t *face) |
599 | 164k | { |
600 | 164k | table = hb_sanitize_context_t ().reference_table<gvar_GVAR> (face); |
601 | | /* If sanitize failed, set glyphCount to 0. */ |
602 | 164k | glyphCount = table->version.to_int () ? face->get_num_glyphs () : 0; |
603 | 164k | } |
604 | 164k | ~accelerator_t () { table.destroy (); } |
605 | | |
606 | | private: |
607 | | |
608 | | static float infer_delta (const hb_array_t<contour_point_t> points, |
609 | | const hb_array_t<contour_point_t> deltas, |
610 | | unsigned int target, unsigned int prev, unsigned int next, |
611 | | float contour_point_t::*m) |
612 | 122k | { |
613 | 122k | float target_val = points.arrayZ[target].*m; |
614 | 122k | float prev_val = points.arrayZ[prev].*m; |
615 | 122k | float next_val = points.arrayZ[next].*m; |
616 | 122k | float prev_delta = deltas.arrayZ[prev].*m; |
617 | 122k | float next_delta = deltas.arrayZ[next].*m; |
618 | | |
619 | 122k | if (prev_val == next_val) |
620 | 67.5k | return (prev_delta == next_delta) ? prev_delta : 0.f; |
621 | 54.8k | else if (target_val <= hb_min (prev_val, next_val)) |
622 | 18.0k | return (prev_val < next_val) ? prev_delta : next_delta; |
623 | 36.7k | else if (target_val >= hb_max (prev_val, next_val)) |
624 | 21.2k | return (prev_val > next_val) ? prev_delta : next_delta; |
625 | | |
626 | | /* linear interpolation */ |
627 | 15.4k | float r = (target_val - prev_val) / (next_val - prev_val); |
628 | 15.4k | return prev_delta + r * (next_delta - prev_delta); |
629 | 122k | } |
630 | | |
631 | | static unsigned int next_index (unsigned int i, unsigned int start, unsigned int end) |
632 | 143k | { return (i >= end) ? start : (i + 1); } |
633 | | |
634 | | public: |
635 | | bool apply_deltas_to_points (hb_codepoint_t glyph, |
636 | | hb_array_t<const int> coords, |
637 | | const hb_array_t<contour_point_t> points, |
638 | | hb_glyf_scratch_t &scratch, |
639 | | hb_scalar_cache_t *gvar_cache = nullptr, |
640 | | bool phantom_only = false) const |
641 | 1.42M | { |
642 | 1.42M | if (unlikely (glyph >= glyphCount)) return true; |
643 | | |
644 | 405k | hb_bytes_t var_data_bytes = table->get_glyph_var_data_bytes (table.get_blob (), glyphCount, glyph); |
645 | 405k | if (!var_data_bytes.as<GlyphVariationData> ()->has_data ()) return true; |
646 | | |
647 | 197k | auto &shared_indices = scratch.shared_indices; |
648 | 197k | shared_indices.clear (); |
649 | | |
650 | 197k | typename GlyphVariationData::tuple_iterator_t iterator; |
651 | 197k | if (!GlyphVariationData::get_tuple_iterator (var_data_bytes, table->axisCount, |
652 | 197k | var_data_bytes.arrayZ, |
653 | 197k | shared_indices, &iterator)) |
654 | 40.2k | return true; /* so the variation isn't applied at all */ |
655 | | |
656 | | /* Save original points for inferred delta calculation */ |
657 | 157k | auto &orig_points_vec = scratch.orig_points; |
658 | 157k | orig_points_vec.clear (); // Populated lazily |
659 | 157k | auto orig_points = orig_points_vec.as_array (); |
660 | | |
661 | | /* the flag is used to mark referenced points */ |
662 | 157k | auto &deltas_vec = scratch.deltas; |
663 | 157k | deltas_vec.clear (); // Populated lazily |
664 | 157k | auto deltas = deltas_vec.as_array (); |
665 | | |
666 | 157k | unsigned num_coords = table->axisCount; |
667 | 157k | hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * num_coords); |
668 | | |
669 | 157k | auto &private_indices = scratch.private_indices; |
670 | 157k | auto &x_deltas = scratch.x_deltas; |
671 | 157k | auto &y_deltas = scratch.y_deltas; |
672 | | |
673 | 157k | unsigned count = points.length; |
674 | 157k | bool flush = false; |
675 | | |
676 | 157k | do |
677 | 1.87M | { |
678 | 1.87M | float scalar = iterator.current_tuple->calculate_scalar (coords, num_coords, shared_tuples, |
679 | 1.87M | gvar_cache); |
680 | | |
681 | 1.87M | if (scalar == 0.f) continue; |
682 | 126k | const HBUINT8 *p = iterator.get_serialized_data (); |
683 | 126k | unsigned int length = iterator.current_tuple->get_data_size (); |
684 | 126k | if (unlikely (!iterator.var_data_bytes.check_range (p, length))) |
685 | 48.0k | return false; |
686 | | |
687 | 77.9k | if (!deltas) |
688 | 65.8k | { |
689 | 65.8k | if (unlikely (!deltas_vec.resize (count, false))) return false; |
690 | 63.2k | deltas = deltas_vec.as_array (); |
691 | 63.2k | hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0, |
692 | 63.2k | (phantom_only ? 4 : count) * sizeof (deltas[0])); |
693 | 63.2k | } |
694 | | |
695 | 75.3k | const HBUINT8 *end = p + length; |
696 | | |
697 | 75.3k | bool has_private_points = iterator.current_tuple->has_private_points (); |
698 | 75.3k | if (has_private_points && |
699 | 75.3k | !GlyphVariationData::decompile_points (p, private_indices, end)) |
700 | 8.25k | return false; |
701 | 67.0k | const hb_array_t<unsigned int> &indices = has_private_points ? private_indices : shared_indices; |
702 | | |
703 | 67.0k | bool apply_to_all = (indices.length == 0); |
704 | 67.0k | unsigned num_deltas = apply_to_all ? points.length : indices.length; |
705 | 67.0k | unsigned start_deltas = (phantom_only && num_deltas >= 4 ? num_deltas - 4 : 0); |
706 | 67.0k | if (unlikely (!x_deltas.resize (num_deltas, false))) return false; |
707 | 65.5k | if (unlikely (!GlyphVariationData::decompile_deltas (p, x_deltas, end, false, start_deltas))) return false; |
708 | 42.6k | if (unlikely (!y_deltas.resize (num_deltas, false))) return false; |
709 | 41.1k | if (unlikely (!GlyphVariationData::decompile_deltas (p, y_deltas, end, false, start_deltas))) return false; |
710 | | |
711 | 27.7k | if (!apply_to_all) |
712 | 18.2k | { |
713 | 18.2k | if (!orig_points && !phantom_only) |
714 | 14.2k | { |
715 | 14.2k | orig_points_vec.extend (points); |
716 | 14.2k | if (unlikely (orig_points_vec.in_error ())) return false; |
717 | 11.8k | orig_points = orig_points_vec.as_array (); |
718 | 11.8k | } |
719 | | |
720 | 15.9k | if (flush) |
721 | 3.26k | { |
722 | 32.8k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
723 | 29.6k | points.arrayZ[i].translate (deltas.arrayZ[i]); |
724 | 3.26k | flush = false; |
725 | | |
726 | 3.26k | } |
727 | 15.9k | hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0, |
728 | 15.9k | (phantom_only ? 4 : count) * sizeof (deltas[0])); |
729 | 15.9k | } |
730 | | |
731 | 25.4k | if (HB_OPTIMIZE_SIZE_VAL) |
732 | 0 | { |
733 | 0 | for (unsigned int i = 0; i < num_deltas; i++) |
734 | 0 | { |
735 | 0 | unsigned int pt_index; |
736 | 0 | if (apply_to_all) |
737 | 0 | pt_index = i; |
738 | 0 | else |
739 | 0 | { |
740 | 0 | pt_index = indices[i]; |
741 | 0 | if (unlikely (pt_index >= deltas.length)) continue; |
742 | 0 | } |
743 | 0 | if (phantom_only && pt_index < count - 4) continue; |
744 | 0 | auto &delta = deltas.arrayZ[pt_index]; |
745 | 0 | delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */ |
746 | 0 | delta.add_delta (x_deltas.arrayZ[i] * scalar, |
747 | 0 | y_deltas.arrayZ[i] * scalar); |
748 | 0 | } |
749 | 0 | } |
750 | 25.4k | else |
751 | 25.4k | { |
752 | | /* Ouch. Four cases... for optimization. */ |
753 | 25.4k | if (scalar != 1.0f) |
754 | 6.67k | { |
755 | 6.67k | if (apply_to_all) |
756 | 17.5k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
757 | 15.3k | { |
758 | 15.3k | auto &delta = deltas.arrayZ[i]; |
759 | 15.3k | delta.add_delta (x_deltas.arrayZ[i] * scalar, |
760 | 15.3k | y_deltas.arrayZ[i] * scalar); |
761 | 15.3k | } |
762 | 4.46k | else |
763 | 20.1k | for (unsigned int i = 0; i < num_deltas; i++) |
764 | 15.6k | { |
765 | 15.6k | unsigned int pt_index = indices[i]; |
766 | 15.6k | if (unlikely (pt_index >= deltas.length)) continue; |
767 | 10.0k | if (phantom_only && pt_index < count - 4) continue; |
768 | 9.39k | auto &delta = deltas.arrayZ[pt_index]; |
769 | 9.39k | delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */ |
770 | 9.39k | delta.add_delta (x_deltas.arrayZ[i] * scalar, |
771 | 9.39k | y_deltas.arrayZ[i] * scalar); |
772 | 9.39k | } |
773 | 6.67k | } |
774 | 18.7k | else |
775 | 18.7k | { |
776 | 18.7k | if (apply_to_all) |
777 | 102k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
778 | 94.8k | { |
779 | 94.8k | auto &delta = deltas.arrayZ[i]; |
780 | 94.8k | delta.add_delta (x_deltas.arrayZ[i], |
781 | 94.8k | y_deltas.arrayZ[i]); |
782 | 94.8k | } |
783 | 11.4k | else |
784 | 83.3k | for (unsigned int i = 0; i < num_deltas; i++) |
785 | 71.8k | { |
786 | 71.8k | unsigned int pt_index = indices[i]; |
787 | 71.8k | if (unlikely (pt_index >= deltas.length)) continue; |
788 | 14.9k | if (phantom_only && pt_index < count - 4) continue; |
789 | 14.1k | auto &delta = deltas.arrayZ[pt_index]; |
790 | 14.1k | delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */ |
791 | 14.1k | delta.add_delta (x_deltas.arrayZ[i], |
792 | 14.1k | y_deltas.arrayZ[i]); |
793 | 14.1k | } |
794 | 18.7k | } |
795 | 25.4k | } |
796 | | |
797 | | /* infer deltas for unreferenced points */ |
798 | 25.4k | if (!apply_to_all && !phantom_only) |
799 | 15.0k | { |
800 | 15.0k | unsigned start_point = 0; |
801 | 15.0k | unsigned end_point = 0; |
802 | 57.6k | while (true) |
803 | 57.6k | { |
804 | 219k | while (end_point < count && !points.arrayZ[end_point].is_end_point) |
805 | 161k | end_point++; |
806 | 57.6k | if (unlikely (end_point == count)) break; |
807 | | |
808 | | /* Check the number of unreferenced points in a contour. If no unref points or no ref points, nothing to do. */ |
809 | 42.5k | unsigned unref_count = 0; |
810 | 186k | for (unsigned i = start_point; i < end_point + 1; i++) |
811 | 144k | unref_count += deltas.arrayZ[i].flag; |
812 | 42.5k | unref_count = (end_point - start_point + 1) - unref_count; |
813 | | |
814 | 42.5k | unsigned j = start_point; |
815 | 42.5k | if (unref_count == 0 || unref_count > end_point - start_point) |
816 | 39.6k | goto no_more_gaps; |
817 | | |
818 | 2.93k | for (;;) |
819 | 4.07k | { |
820 | | /* Locate the next gap of unreferenced points between two referenced points prev and next. |
821 | | * Note that a gap may wrap around at left (start_point) and/or at right (end_point). |
822 | | */ |
823 | 4.07k | unsigned int prev, next, i; |
824 | 4.07k | for (;;) |
825 | 16.0k | { |
826 | 16.0k | i = j; |
827 | 16.0k | j = next_index (i, start_point, end_point); |
828 | 16.0k | if (deltas.arrayZ[i].flag && !deltas.arrayZ[j].flag) break; |
829 | 16.0k | } |
830 | 4.07k | prev = j = i; |
831 | 4.07k | for (;;) |
832 | 65.2k | { |
833 | 65.2k | i = j; |
834 | 65.2k | j = next_index (i, start_point, end_point); |
835 | 65.2k | if (!deltas.arrayZ[i].flag && deltas.arrayZ[j].flag) break; |
836 | 65.2k | } |
837 | 4.07k | next = j; |
838 | | /* Infer deltas for all unref points in the gap between prev and next */ |
839 | 4.07k | i = prev; |
840 | 4.07k | for (;;) |
841 | 62.3k | { |
842 | 62.3k | i = next_index (i, start_point, end_point); |
843 | 62.3k | if (i == next) break; |
844 | 61.1k | deltas.arrayZ[i].x = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::x); |
845 | 61.1k | deltas.arrayZ[i].y = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::y); |
846 | 61.1k | if (--unref_count == 0) goto no_more_gaps; |
847 | 61.1k | } |
848 | 4.07k | } |
849 | 42.5k | no_more_gaps: |
850 | 42.5k | start_point = end_point = end_point + 1; |
851 | 42.5k | } |
852 | 15.0k | } |
853 | | |
854 | 25.4k | flush = true; |
855 | | |
856 | 1.77M | } while (iterator.move_to_next ()); |
857 | | |
858 | 56.8k | if (flush) |
859 | 5.19k | { |
860 | 79.9k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
861 | 74.7k | points.arrayZ[i].translate (deltas.arrayZ[i]); |
862 | 5.19k | } |
863 | | |
864 | 56.8k | return true; |
865 | 157k | } OT::gvar_GVAR<OT::NumType<true, unsigned int, 3u>, 1196835154u>::accelerator_t::apply_deltas_to_points(unsigned int, hb_array_t<int const>, hb_array_t<contour_point_t>, hb_glyf_scratch_t&, OT::hb_scalar_cache_t*, bool) const Line | Count | Source | 641 | 267k | { | 642 | 267k | if (unlikely (glyph >= glyphCount)) return true; | 643 | | | 644 | 181k | hb_bytes_t var_data_bytes = table->get_glyph_var_data_bytes (table.get_blob (), glyphCount, glyph); | 645 | 181k | if (!var_data_bytes.as<GlyphVariationData> ()->has_data ()) return true; | 646 | | | 647 | 85.1k | auto &shared_indices = scratch.shared_indices; | 648 | 85.1k | shared_indices.clear (); | 649 | | | 650 | 85.1k | typename GlyphVariationData::tuple_iterator_t iterator; | 651 | 85.1k | if (!GlyphVariationData::get_tuple_iterator (var_data_bytes, table->axisCount, | 652 | 85.1k | var_data_bytes.arrayZ, | 653 | 85.1k | shared_indices, &iterator)) | 654 | 21.0k | return true; /* so isn't applied at all */ | 655 | | | 656 | | /* Save original points for inferred delta calculation */ | 657 | 64.1k | auto &orig_points_vec = scratch.orig_points; | 658 | 64.1k | orig_points_vec.clear (); // Populated lazily | 659 | 64.1k | auto orig_points = orig_points_vec.as_array (); | 660 | | | 661 | | /* flag is used to indicate referenced point */ | 662 | 64.1k | auto &deltas_vec = scratch.deltas; | 663 | 64.1k | deltas_vec.clear (); // Populated lazily | 664 | 64.1k | auto deltas = deltas_vec.as_array (); | 665 | | | 666 | 64.1k | unsigned num_coords = table->axisCount; | 667 | 64.1k | hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * num_coords); | 668 | | | 669 | 64.1k | auto &private_indices = scratch.private_indices; | 670 | 64.1k | auto &x_deltas = scratch.x_deltas; | 671 | 64.1k | auto &y_deltas = scratch.y_deltas; | 672 | | | 673 | 64.1k | unsigned count = points.length; | 674 | 64.1k | bool flush = false; | 675 | | | 676 | 64.1k | do | 677 | 431k | { | 678 | 431k | float scalar = iterator.current_tuple->calculate_scalar (coords, num_coords, shared_tuples, | 679 | 431k | gvar_cache); | 680 | | | 681 | 431k | if (scalar == 0.f) continue; | 682 | 64.3k | const HBUINT8 *p = iterator.get_serialized_data (); | 683 | 64.3k | unsigned int length = iterator.current_tuple->get_data_size (); | 684 | 64.3k | if (unlikely (!iterator.var_data_bytes.check_range (p, length))) | 685 | 26.6k | return false; | 686 | | | 687 | 37.6k | if (!deltas) | 688 | 34.2k | { | 689 | 34.2k | if (unlikely (!deltas_vec.resize (count, false))) return false; | 690 | 32.5k | deltas = deltas_vec.as_array (); | 691 | 32.5k | hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0, | 692 | 32.5k | (phantom_only ? 4 : count) * sizeof (deltas[0])); | 693 | 32.5k | } | 694 | | | 695 | 35.9k | const HBUINT8 *end = p + length; | 696 | | | 697 | 35.9k | bool has_private_points = iterator.current_tuple->has_private_points (); | 698 | 35.9k | if (has_private_points && | 699 | 35.9k | !GlyphVariationData::decompile_points (p, private_indices, end)) | 700 | 4.20k | return false; | 701 | 31.7k | const hb_array_t<unsigned int> &indices = has_private_points ? private_indices : shared_indices; | 702 | | | 703 | 31.7k | bool apply_to_all = (indices.length == 0); | 704 | 31.7k | unsigned num_deltas = apply_to_all ? points.length : indices.length; | 705 | 31.7k | unsigned start_deltas = (phantom_only && num_deltas >= 4 ? 
num_deltas - 4 : 0); |
706 | 31.7k | if (unlikely (!x_deltas.resize (num_deltas, false))) return false; |
707 | 30.6k | if (unlikely (!GlyphVariationData::decompile_deltas (p, x_deltas, end, false, start_deltas))) return false; |
708 | 16.3k | if (unlikely (!y_deltas.resize (num_deltas, false))) return false; |
709 | 15.2k | if (unlikely (!GlyphVariationData::decompile_deltas (p, y_deltas, end, false, start_deltas))) return false; |
710 | | |
711 | 12.1k | if (!apply_to_all) |
712 | 8.65k | { |
713 | 8.65k | if (!orig_points && !phantom_only) |
714 | 7.45k | { |
715 | 7.45k | orig_points_vec.extend (points); |
716 | 7.45k | if (unlikely (orig_points_vec.in_error ())) return false; |
717 | 5.58k | orig_points = orig_points_vec.as_array (); |
718 | 5.58k | } |
719 | | |
720 | 6.78k | if (flush) |
721 | 1.14k | { |
722 | 8.67k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
723 | 7.52k | points.arrayZ[i].translate (deltas.arrayZ[i]); |
724 | 1.14k | flush = false; |
725 | | |
726 | 1.14k | } |
727 | 6.78k | hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0, |
728 | 6.78k | (phantom_only ? 4 : count) * sizeof (deltas[0])); |
729 | 6.78k | } |
730 | | |
731 | 10.2k | if (HB_OPTIMIZE_SIZE_VAL) |
732 | 0 | { |
733 | 0 | for (unsigned int i = 0; i < num_deltas; i++) |
734 | 0 | { |
735 | 0 | unsigned int pt_index; |
736 | 0 | if (apply_to_all) |
737 | 0 | pt_index = i; |
738 | 0 | else |
739 | 0 | { |
740 | 0 | pt_index = indices[i]; |
741 | 0 | if (unlikely (pt_index >= deltas.length)) continue; |
742 | 0 | } |
743 | 0 | if (phantom_only && pt_index < count - 4) continue; |
744 | 0 | auto &delta = deltas.arrayZ[pt_index]; |
745 | 0 | delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */ |
746 | 0 | delta.add_delta (x_deltas.arrayZ[i] * scalar, |
747 | 0 | y_deltas.arrayZ[i] * scalar); |
748 | 0 | } |
749 | 0 | } |
750 | 10.2k | else |
751 | 10.2k | { |
752 | | /* Ouch. Four cases... for optimization. */ |
753 | 10.2k | if (scalar != 1.0f) |
754 | 1.29k | { |
755 | 1.29k | if (apply_to_all) |
756 | 4.36k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
757 | 3.70k | { |
758 | 3.70k | auto &delta = deltas.arrayZ[i]; |
759 | 3.70k | delta.add_delta (x_deltas.arrayZ[i] * scalar, |
760 | 3.70k | y_deltas.arrayZ[i] * scalar); |
761 | 3.70k | } |
762 | 630 | else |
763 | 2.32k | for (unsigned int i = 0; i < num_deltas; i++) |
764 | 1.69k | { |
765 | 1.69k | unsigned int pt_index = indices[i]; |
766 | 1.69k | if (unlikely (pt_index >= deltas.length)) continue; |
767 | 1.26k | if (phantom_only && pt_index < count - 4) continue; |
768 | 1.26k | auto &delta = deltas.arrayZ[pt_index]; |
769 | 1.26k | delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */ |
770 | 1.26k | delta.add_delta (x_deltas.arrayZ[i] * scalar, |
771 | 1.26k | y_deltas.arrayZ[i] * scalar); |
772 | 1.26k | } |
773 | 1.29k | } |
774 | 8.96k | else |
775 | 8.96k | { |
776 | 8.96k | if (apply_to_all) |
777 | 23.0k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
778 | 20.2k | { |
779 | 20.2k | auto &delta = deltas.arrayZ[i]; |
780 | 20.2k | delta.add_delta (x_deltas.arrayZ[i], |
781 | 20.2k | y_deltas.arrayZ[i]); |
782 | 20.2k | } |
783 | 6.15k | else |
784 | 63.5k | for (unsigned int i = 0; i < num_deltas; i++) |
785 | 57.3k | { |
786 | 57.3k | unsigned int pt_index = indices[i]; |
787 | 57.3k | if (unlikely (pt_index >= deltas.length)) continue; |
788 | 7.15k | if (phantom_only && pt_index < count - 4) continue; |
789 | 7.11k | auto &delta = deltas.arrayZ[pt_index]; |
790 | 7.11k | delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */ |
791 | 7.11k | delta.add_delta (x_deltas.arrayZ[i], |
792 | 7.11k | y_deltas.arrayZ[i]); |
793 | 7.11k | } |
794 | 8.96k | } |
795 | 10.2k | } |
796 | | |
797 | | /* infer deltas for unreferenced points */ |
798 | 10.2k | if (!apply_to_all && !phantom_only) |
799 | 6.73k | { |
800 | 6.73k | unsigned start_point = 0; |
801 | 6.73k | unsigned end_point = 0; |
802 | 23.4k | while (true) |
803 | 23.4k | { |
804 | 78.2k | while (end_point < count && !points.arrayZ[end_point].is_end_point) |
805 | 54.7k | end_point++; |
806 | 23.4k | if (unlikely (end_point == count)) break; |
807 | | |
808 | | /* Check the number of unreferenced points in a contour. If no unref points or no ref points, nothing to do. */ |
809 | 16.7k | unsigned unref_count = 0; |
810 | 61.2k | for (unsigned i = start_point; i < end_point + 1; i++) |
811 | 44.5k | unref_count += deltas.arrayZ[i].flag; |
812 | 16.7k | unref_count = (end_point - start_point + 1) - unref_count; |
813 | | |
814 | 16.7k | unsigned j = start_point; |
815 | 16.7k | if (unref_count == 0 || unref_count > end_point - start_point) |
816 | 16.2k | goto no_more_gaps; |
817 | | |
818 | 469 | for (;;) |
819 | 944 | { |
820 | | /* Locate the next gap of unreferenced points between two referenced points prev and next. |
821 | | * Note that a gap may wrap around at left (start_point) and/or at right (end_point). |
822 | | */ |
823 | 944 | unsigned int prev, next, i; |
824 | 944 | for (;;) |
825 | 5.35k | { |
826 | 5.35k | i = j; |
827 | 5.35k | j = next_index (i, start_point, end_point); |
828 | 5.35k | if (deltas.arrayZ[i].flag && !deltas.arrayZ[j].flag) break; |
829 | 5.35k | } |
830 | 944 | prev = j = i; |
831 | 944 | for (;;) |
832 | 18.7k | { |
833 | 18.7k | i = j; |
834 | 18.7k | j = next_index (i, start_point, end_point); |
835 | 18.7k | if (!deltas.arrayZ[i].flag && deltas.arrayZ[j].flag) break; |
836 | 18.7k | } |
837 | 944 | next = j; |
838 | | /* Infer deltas for all unref points in the gap between prev and next */ |
839 | 944 | i = prev; |
840 | 944 | for (;;) |
841 | 18.2k | { |
842 | 18.2k | i = next_index (i, start_point, end_point); |
843 | 18.2k | if (i == next) break; |
844 | 17.7k | deltas.arrayZ[i].x = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::x); |
845 | 17.7k | deltas.arrayZ[i].y = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::y); |
846 | 17.7k | if (--unref_count == 0) goto no_more_gaps; |
847 | 17.7k | } |
848 | 944 | } |
849 | 16.7k | no_more_gaps: |
850 | 16.7k | start_point = end_point = end_point + 1; |
851 | 16.7k | } |
852 | 6.73k | } |
853 | | |
854 | 10.2k | flush = true; |
855 | | |
856 | 377k | } while (iterator.move_to_next ()); |
857 | | |
858 | 10.1k | if (flush) |
859 | 1.67k | { |
860 | 12.7k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
861 | 11.0k | points.arrayZ[i].translate (deltas.arrayZ[i]); |
862 | 1.67k | } |
863 | | |
864 | 10.1k | return true; |
865 | 64.1k | } |
OT::gvar_GVAR<OT::NumType<true, unsigned short, 2u>, 1735811442u>::accelerator_t::apply_deltas_to_points(unsigned int, hb_array_t<int const>, hb_array_t<contour_point_t>, hb_glyf_scratch_t&, OT::hb_scalar_cache_t*, bool) const
Line | Count | Source |
641 | 1.16M | { |
642 | 1.16M | if (unlikely (glyph >= glyphCount)) return true; |
643 | | |
644 | 224k | hb_bytes_t var_data_bytes = table->get_glyph_var_data_bytes (table.get_blob (), glyphCount, glyph); |
645 | 224k | if (!var_data_bytes.as<GlyphVariationData> ()->has_data ()) return true; |
646 | | |
647 | 112k | auto &shared_indices = scratch.shared_indices; |
648 | 112k | shared_indices.clear (); |
649 | | |
650 | 112k | typename GlyphVariationData::tuple_iterator_t iterator; |
651 | 112k | if (!GlyphVariationData::get_tuple_iterator (var_data_bytes, table->axisCount, |
652 | 112k | var_data_bytes.arrayZ, |
653 | 112k | shared_indices, &iterator)) |
654 | 19.1k | return true; /* so isn't applied at all */ |
655 | | |
656 | | /* Save original points for inferred delta calculation */ |
657 | 93.2k | auto &orig_points_vec = scratch.orig_points; |
658 | 93.2k | orig_points_vec.clear (); // Populated lazily |
659 | 93.2k | auto orig_points = orig_points_vec.as_array (); |
660 | | |
661 | | /* flag is used to indicate referenced point */ |
662 | 93.2k | auto &deltas_vec = scratch.deltas; |
663 | 93.2k | deltas_vec.clear (); // Populated lazily |
664 | 93.2k | auto deltas = deltas_vec.as_array (); |
665 | | |
666 | 93.2k | unsigned num_coords = table->axisCount; |
667 | 93.2k | hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * num_coords); |
668 | | |
669 | 93.2k | auto &private_indices = scratch.private_indices; |
670 | 93.2k | auto &x_deltas = scratch.x_deltas; |
671 | 93.2k | auto &y_deltas = scratch.y_deltas; |
672 | | |
673 | 93.2k | unsigned count = points.length; |
674 | 93.2k | bool flush = false; |
675 | | |
676 | 93.2k | do |
677 | 1.44M | { |
678 | 1.44M | float scalar = iterator.current_tuple->calculate_scalar (coords, num_coords, shared_tuples, |
679 | 1.44M | gvar_cache); |
680 | | |
681 | 1.44M | if (scalar == 0.f) continue; |
682 | 61.6k | const HBUINT8 *p = iterator.get_serialized_data (); |
683 | 61.6k | unsigned int length = iterator.current_tuple->get_data_size (); |
684 | 61.6k | if (unlikely (!iterator.var_data_bytes.check_range (p, length))) |
685 | 21.3k | return false; |
686 | | |
687 | 40.3k | if (!deltas) |
688 | 31.6k | { |
689 | 31.6k | if (unlikely (!deltas_vec.resize (count, false))) return false; |
690 | 30.7k | deltas = deltas_vec.as_array (); |
691 | 30.7k | hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0, |
692 | 30.7k | (phantom_only ? 4 : count) * sizeof (deltas[0])); |
693 | 30.7k | } |
694 | | |
695 | 39.4k | const HBUINT8 *end = p + length; |
696 | | |
697 | 39.4k | bool has_private_points = iterator.current_tuple->has_private_points (); |
698 | 39.4k | if (has_private_points && |
699 | 39.4k | !GlyphVariationData::decompile_points (p, private_indices, end)) |
700 | 4.05k | return false; |
701 | 35.3k | const hb_array_t<unsigned int> &indices = has_private_points ? private_indices : shared_indices; |
702 | | |
703 | 35.3k | bool apply_to_all = (indices.length == 0); |
704 | 35.3k | unsigned num_deltas = apply_to_all ? points.length : indices.length; |
705 | 35.3k | unsigned start_deltas = (phantom_only && num_deltas >= 4 ? num_deltas - 4 : 0); |
706 | 35.3k | if (unlikely (!x_deltas.resize (num_deltas, false))) return false; |
707 | 34.9k | if (unlikely (!GlyphVariationData::decompile_deltas (p, x_deltas, end, false, start_deltas))) return false; |
708 | 26.3k | if (unlikely (!y_deltas.resize (num_deltas, false))) return false; |
709 | 25.8k | if (unlikely (!GlyphVariationData::decompile_deltas (p, y_deltas, end, false, start_deltas))) return false; |
710 | | |
711 | 15.6k | if (!apply_to_all) |
712 | 9.62k | { |
713 | 9.62k | if (!orig_points && !phantom_only) |
714 | 6.77k | { |
715 | 6.77k | orig_points_vec.extend (points); |
716 | 6.77k | if (unlikely (orig_points_vec.in_error ())) return false; |
717 | 6.27k | orig_points = orig_points_vec.as_array (); |
718 | 6.27k | } |
719 | | |
720 | 9.12k | if (flush) |
721 | 2.12k | { |
722 | 24.2k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
723 | 22.0k | points.arrayZ[i].translate (deltas.arrayZ[i]); |
724 | 2.12k | flush = false; |
725 | | |
726 | 2.12k | } |
727 | 9.12k | hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0, |
728 | 9.12k | (phantom_only ? 4 : count) * sizeof (deltas[0])); |
729 | 9.12k | } |
730 | | |
731 | 15.1k | if (HB_OPTIMIZE_SIZE_VAL) |
732 | 0 | { |
733 | 0 | for (unsigned int i = 0; i < num_deltas; i++) |
734 | 0 | { |
735 | 0 | unsigned int pt_index; |
736 | 0 | if (apply_to_all) |
737 | 0 | pt_index = i; |
738 | 0 | else |
739 | 0 | { |
740 | 0 | pt_index = indices[i]; |
741 | 0 | if (unlikely (pt_index >= deltas.length)) continue; |
742 | 0 | } |
743 | 0 | if (phantom_only && pt_index < count - 4) continue; |
744 | 0 | auto &delta = deltas.arrayZ[pt_index]; |
745 | 0 | delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */ |
746 | 0 | delta.add_delta (x_deltas.arrayZ[i] * scalar, |
747 | 0 | y_deltas.arrayZ[i] * scalar); |
748 | 0 | } |
749 | 0 | } |
750 | 15.1k | else |
751 | 15.1k | { |
752 | | /* Ouch. Four cases... for optimization. */ |
753 | 15.1k | if (scalar != 1.0f) |
754 | 5.38k | { |
755 | 5.38k | if (apply_to_all) |
756 | 13.2k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
757 | 11.6k | { |
758 | 11.6k | auto &delta = deltas.arrayZ[i]; |
759 | 11.6k | delta.add_delta (x_deltas.arrayZ[i] * scalar, |
760 | 11.6k | y_deltas.arrayZ[i] * scalar); |
761 | 11.6k | } |
762 | 3.83k | else |
763 | 17.8k | for (unsigned int i = 0; i < num_deltas; i++) |
764 | 13.9k | { |
765 | 13.9k | unsigned int pt_index = indices[i]; |
766 | 13.9k | if (unlikely (pt_index >= deltas.length)) continue; |
767 | 8.79k | if (phantom_only && pt_index < count - 4) continue; |
768 | 8.13k | auto &delta = deltas.arrayZ[pt_index]; |
769 | 8.13k | delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */ |
770 | 8.13k | delta.add_delta (x_deltas.arrayZ[i] * scalar, |
771 | 8.13k | y_deltas.arrayZ[i] * scalar); |
772 | 8.13k | } |
773 | 5.38k | } |
774 | 9.77k | else |
775 | 9.77k | { |
776 | 9.77k | if (apply_to_all) |
777 | 79.1k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
778 | 74.6k | { |
779 | 74.6k | auto &delta = deltas.arrayZ[i]; |
780 | 74.6k | delta.add_delta (x_deltas.arrayZ[i], |
781 | 74.6k | y_deltas.arrayZ[i]); |
782 | 74.6k | } |
783 | 5.29k | else |
784 | 19.7k | for (unsigned int i = 0; i < num_deltas; i++) |
785 | 14.5k | { |
786 | 14.5k | unsigned int pt_index = indices[i]; |
787 | 14.5k | if (unlikely (pt_index >= deltas.length)) continue; |
788 | 7.76k | if (phantom_only && pt_index < count - 4) continue; |
789 | 7.03k | auto &delta = deltas.arrayZ[pt_index]; |
790 | 7.03k | delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */ |
791 | 7.03k | delta.add_delta (x_deltas.arrayZ[i], |
792 | 7.03k | y_deltas.arrayZ[i]); |
793 | 7.03k | } |
794 | 9.77k | } |
795 | 15.1k | } |
796 | | |
797 | | /* infer deltas for unreferenced points */ |
798 | 15.1k | if (!apply_to_all && !phantom_only) |
799 | 8.34k | { |
800 | 8.34k | unsigned start_point = 0; |
801 | 8.34k | unsigned end_point = 0; |
802 | 34.1k | while (true) |
803 | 34.1k | { |
804 | 141k | while (end_point < count && !points.arrayZ[end_point].is_end_point) |
805 | 106k | end_point++; |
806 | 34.1k | if (unlikely (end_point == count)) break; |
807 | | |
808 | | /* Check the number of unreferenced points in a contour. If no unref points or no ref points, nothing to do. */ |
809 | 25.8k | unsigned unref_count = 0; |
810 | 125k | for (unsigned i = start_point; i < end_point + 1; i++) |
811 | 99.4k | unref_count += deltas.arrayZ[i].flag; |
812 | 25.8k | unref_count = (end_point - start_point + 1) - unref_count; |
813 | | |
814 | 25.8k | unsigned j = start_point; |
815 | 25.8k | if (unref_count == 0 || unref_count > end_point - start_point) |
816 | 23.3k | goto no_more_gaps; |
817 | | |
818 | 2.46k | for (;;) |
819 | 3.12k | { |
820 | | /* Locate the next gap of unreferenced points between two referenced points prev and next. |
821 | | * Note that a gap may wrap around at left (start_point) and/or at right (end_point). |
822 | | */ |
823 | 3.12k | unsigned int prev, next, i; |
824 | 3.12k | for (;;) |
825 | 10.6k | { |
826 | 10.6k | i = j; |
827 | 10.6k | j = next_index (i, start_point, end_point); |
828 | 10.6k | if (deltas.arrayZ[i].flag && !deltas.arrayZ[j].flag) break; |
829 | 10.6k | } |
830 | 3.12k | prev = j = i; |
831 | 3.12k | for (;;) |
832 | 46.5k | { |
833 | 46.5k | i = j; |
834 | 46.5k | j = next_index (i, start_point, end_point); |
835 | 46.5k | if (!deltas.arrayZ[i].flag && deltas.arrayZ[j].flag) break; |
836 | 46.5k | } |
837 | 3.12k | next = j; |
838 | | /* Infer deltas for all unref points in the gap between prev and next */ |
839 | 3.12k | i = prev; |
840 | 3.12k | for (;;) |
841 | 44.0k | { |
842 | 44.0k | i = next_index (i, start_point, end_point); |
843 | 44.0k | if (i == next) break; |
844 | 43.4k | deltas.arrayZ[i].x = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::x); |
845 | 43.4k | deltas.arrayZ[i].y = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::y); |
846 | 43.4k | if (--unref_count == 0) goto no_more_gaps; |
847 | 43.4k | } |
848 | 3.12k | } |
849 | 25.8k | no_more_gaps: |
850 | 25.8k | start_point = end_point = end_point + 1; |
851 | 25.8k | } |
852 | 8.34k | } |
853 | | |
854 | 15.1k | flush = true; |
855 | | |
856 | 1.40M | } while (iterator.move_to_next ()); |
857 | | |
858 | 46.7k | if (flush) |
859 | 3.51k | { |
860 | 67.1k | for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++) |
861 | 63.6k | points.arrayZ[i].translate (deltas.arrayZ[i]); |
862 | 3.51k | } |
863 | | |
864 | 46.7k | return true; |
865 | 93.2k | } |
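The listing above accumulates explicit deltas per tuple and then fills in deltas for points the tuple leaves unreferenced: infer_delta applies the OpenType inferred-delta (IUP) rule per axis between the nearest referenced neighbours prev and next. (The last four points of a glyph are the phantom points carrying its metrics, hence the count - 4 bounds in the phantom_only paths.) A minimal standalone sketch of the per-axis rule follows; the name and signature are illustrative, not HarfBuzz's internals.

#include <algorithm>

/* Sketch of the per-axis inferred-delta rule that infer_delta() implements:
 * - if the untouched coordinate lies strictly between the two referenced
 *   originals, interpolate the delta linearly;
 * - if it lies outside that span, reuse the delta of the nearer referenced
 *   point;
 * - if the referenced originals coincide, only a delta both ends agree on
 *   can be inferred. */
static float
infer_axis_delta (float target,      /* original coord of the untouched point */
                  float prev_orig, float next_orig,   /* referenced originals */
                  float prev_delta, float next_delta) /* their explicit deltas */
{
  if (prev_orig == next_orig)
    return prev_delta == next_delta ? prev_delta : 0.f;

  if (target <= std::min (prev_orig, next_orig))
    return prev_orig < next_orig ? prev_delta : next_delta;
  if (target >= std::max (prev_orig, next_orig))
    return prev_orig > next_orig ? prev_delta : next_delta;

  /* Strictly between: proportional interpolation of the delta. */
  float t = (target - prev_orig) / (next_orig - prev_orig);
  return prev_delta + t * (next_delta - prev_delta);
}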
866 | | |
867 | | unsigned int get_axis_count () const { return table->axisCount; } |
868 | | |
869 | | private: |
870 | | hb_blob_ptr_t<gvar_GVAR> table; |
871 | | unsigned glyphCount; |
872 | | }; |
873 | | |
874 | | protected: |
875 | | FixedVersion<>version; /* Version number of the glyph variations table |
876 | | * Set to 0x00010000u. */ |
877 | | HBUINT16 axisCount; /* The number of variation axes for this font. This must be |
878 | | * the same number as axisCount in the 'fvar' table. */ |
879 | | HBUINT16 sharedTupleCount; |
880 | | /* The number of shared tuple records. Shared tuple records |
881 | | * can be referenced within glyph variation data tables for |
882 | | * multiple glyphs, as opposed to other tuple records stored |
883 | | * directly within a glyph variation data table. */ |
884 | | NNOffset32To<UnsizedArrayOf<F2DOT14>> |
885 | | sharedTuples; /* Offset from the start of this table to the shared tuple records. |
886 | | * Array of tuple records shared across all glyph variation data tables. */ |
887 | | GidOffsetType glyphCountX; /* The number of glyphs in this font. This must match the number of |
888 | | * glyphs stored elsewhere in the font. */ |
889 | | HBUINT16 flags; /* Bit-field that gives the format of the offset array that follows. |
890 | | * If bit 0 is clear, the offsets are uint16; if bit 0 is set, the |
891 | | * offsets are uint32. */ |
892 | | Offset32To<GlyphVariationData> |
893 | | dataZ; /* Offset from the start of this table to the array of |
894 | | * GlyphVariationData tables. */ |
895 | | UnsizedArrayOf<HBUINT8> |
896 | | offsetZ; /* Offsets from the start of the GlyphVariationData array |
897 | | * to each GlyphVariationData table. */ |
898 | | public: |
899 | | DEFINE_SIZE_ARRAY (20, offsetZ); |
900 | | }; |
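The flags field above selects the width of the entries in offsetZ: per the gvar specification, short (uint16) offsets are stored divided by two, while long (uint32) offsets are stored literally. A hedged sketch of that decoding, assuming a raw big-endian byte view rather than HarfBuzz's typed HBUINT16/HBUINT32 accessors:

#include <cstdint>

/* Illustrative decoding of one entry of the gvar offset array.  Glyph g's
 * variation data spans [offset(g), offset(g + 1)) from dataZ; equal
 * consecutive offsets mean the glyph has no variation data. */
static uint32_t
glyph_var_data_offset (const uint8_t *offset_array, /* start of offsetZ */
                       uint16_t flags,
                       unsigned glyph_index)
{
  if (flags & 1) /* bit 0 set: literal uint32 offsets */
  {
    const uint8_t *p = offset_array + 4 * glyph_index;
    return ((uint32_t) p[0] << 24) | ((uint32_t) p[1] << 16) |
           ((uint32_t) p[2] << 8)  |  (uint32_t) p[3];
  }
  /* bit 0 clear: uint16 offsets stored divided by two */
  const uint8_t *p = offset_array + 2 * glyph_index;
  return (((uint32_t) p[0] << 8) | (uint32_t) p[1]) * 2u;
}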
901 | | |
902 | | using gvar = gvar_GVAR<HBUINT16, HB_OT_TAG_gvar>; |
903 | | using GVAR = gvar_GVAR<HBUINT24, HB_OT_TAG_GVAR>; |
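The two aliases differ only in the offset integer width (16-bit vs 24-bit, for fonts beyond the 64k glyph limit) and the table tag. The tag is four ASCII bytes packed big-endian, which is also why the instantiation trace above reads gvar_GVAR<..., 1735811442u>: that constant is HB_TAG('g','v','a','r') == 0x67766172. A quick self-contained check:

#include <cstdint>

/* Stand-alone equivalent of HB_TAG: four ASCII bytes packed big-endian. */
constexpr uint32_t pack_tag (char a, char b, char c, char d)
{
  return ((uint32_t) (uint8_t) a << 24) | ((uint32_t) (uint8_t) b << 16) |
         ((uint32_t) (uint8_t) c << 8)  |  (uint32_t) (uint8_t) d;
}
static_assert (pack_tag ('g','v','a','r') == 1735811442u, "'gvar' == 0x67766172");
static_assert (pack_tag ('G','V','A','R') == 0x47564152u, "'GVAR'");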
904 | | |
905 | | struct gvar_accelerator_t : gvar::accelerator_t { |
906 | 82.3k | gvar_accelerator_t (hb_face_t *face) : gvar::accelerator_t (face) {} |
907 | | }; |
908 | | struct GVAR_accelerator_t : GVAR::accelerator_t { |
909 | 82.3k | GVAR_accelerator_t (hb_face_t *face) : GVAR::accelerator_t (face) {} |
910 | | }; |
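These accelerator wrappers are internal; callers reach this code indirectly by setting design-space coordinates on an hb_font_t, after which outline and extents queries pick up the gvar/GVAR deltas. A minimal caller-side sketch using only public API (the font path, axis value, and glyph id are placeholders; error handling elided):

#include <hb.h>

int
main ()
{
  hb_blob_t *blob = hb_blob_create_from_file ("MyVariable.ttf"); /* placeholder path */
  hb_face_t *face = hb_face_create (blob, 0);
  hb_font_t *font = hb_font_create (face);

  /* Move along the weight axis; gvar deltas are applied under the hood. */
  hb_variation_t wght = { HB_TAG ('w','g','h','t'), 700.f };
  hb_font_set_variations (font, &wght, 1);

  hb_glyph_extents_t extents;
  if (hb_font_get_glyph_extents (font, 1 /* example glyph id */, &extents))
    { /* extents now reflect the variated outline */ }

  hb_font_destroy (font);
  hb_face_destroy (face);
  hb_blob_destroy (blob);
  return 0;
}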
911 | | |
912 | | } /* namespace OT */ |
913 | | |
914 | | #endif /* HB_OT_VAR_GVAR_TABLE_HH */ |