/src/harfbuzz/src/hb-ot-var-gvar-table.hh
/*
 * Copyright © 2019 Adobe Inc.
 * Copyright © 2019 Ebrahim Byagowi
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Adobe Author(s): Michiharu Ariza
 */

#ifndef HB_OT_VAR_GVAR_TABLE_HH
#define HB_OT_VAR_GVAR_TABLE_HH

#include "hb-decycler.hh"
#include "hb-open-type.hh"
#include "hb-ot-var-common.hh"

/*
 * gvar -- Glyph Variation Table
 * https://docs.microsoft.com/en-us/typography/opentype/spec/gvar
 */
#define HB_OT_TAG_gvar HB_TAG('g','v','a','r')
#define HB_OT_TAG_GVAR HB_TAG('G','V','A','R')

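/* 'gvar' uses 16-bit glyph IDs, while 'GVAR' is the 24-bit-glyph-ID variant;
 * both share the implementation below (see the aliases at the end of this
 * file). */

/* Scratch buffers reused across glyf and gvar operations, so per-glyph
 * working memory is not re-allocated on every call. */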
struct hb_glyf_scratch_t
{
  // glyf
  contour_point_vector_t all_points;
  contour_point_vector_t comp_points;
  hb_decycler_t decycler;

  // gvar
  contour_point_vector_t orig_points;
  hb_vector_t<int> x_deltas;
  hb_vector_t<int> y_deltas;
  contour_point_vector_t deltas;
  hb_vector_t<unsigned int> shared_indices;
  hb_vector_t<unsigned int> private_indices;
};

namespace OT {

template <typename OffsetType>
struct glyph_variations_t
{
  // TODO: Move tuple_variations_t to outside of TupleVariationData
  using tuple_variations_t = typename TupleVariationData<OffsetType>::tuple_variations_t;
  using GlyphVariationData = TupleVariationData<OffsetType>;

  hb_vector_t<tuple_variations_t> glyph_variations;

  hb_vector_t<F2DOT14> compiled_shared_tuples;
  private:
  unsigned shared_tuples_count = 0;

  /* shared coords -> index map after instantiation */
  hb_hashmap_t<const hb_vector_t<F2DOT14>*, unsigned> shared_tuples_idx_map;

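  /* Pool for transient allocations made while decompiling, instantiating,
   * and compiling the tuple variations below. */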
  hb_alloc_pool_t pool;

  public:
  unsigned compiled_shared_tuples_count () const
  { return shared_tuples_count; }

  unsigned compiled_byte_size () const
  {
    unsigned byte_size = 0;
    for (const auto& _ : glyph_variations)
      byte_size += _.get_compiled_byte_size ();

    return byte_size;
  }

  bool create_from_glyphs_var_data (unsigned axis_count,
                                    const hb_array_t<const F2DOT14> shared_tuples,
                                    const hb_subset_plan_t *plan,
                                    const hb_hashmap_t<hb_codepoint_t, hb_bytes_t>& new_gid_var_data_map)
  {
    if (unlikely (!glyph_variations.alloc_exact (plan->new_to_old_gid_list.length)))
      return false;

    auto it = hb_iter (plan->new_to_old_gid_list);
    for (auto &_ : it)
    {
      hb_codepoint_t new_gid = _.first;
      contour_point_vector_t *all_contour_points;
      if (!new_gid_var_data_map.has (new_gid) ||
          !plan->new_gid_contour_points_map.has (new_gid, &all_contour_points))
        return false;
      hb_bytes_t var_data = new_gid_var_data_map.get (new_gid);

      const GlyphVariationData* p = reinterpret_cast<const GlyphVariationData*> (var_data.arrayZ);
      typename GlyphVariationData::tuple_iterator_t iterator;
      tuple_variations_t tuple_vars;

      hb_vector_t<unsigned> shared_indices;

      /* If the variation data is empty, push an empty struct into the vector
       * to keep it in sync with the new_to_old_gid_list. */
      if (!var_data || !p->has_data () || !all_contour_points->length ||
          !GlyphVariationData::get_tuple_iterator (var_data, axis_count,
                                                   var_data.arrayZ,
                                                   shared_indices, &iterator))
      {
        glyph_variations.push (std::move (tuple_vars));
        continue;
      }

      bool is_composite_glyph = plan->composite_new_gids.has (new_gid);

      if (!p->decompile_tuple_variations (all_contour_points->length, true /* is_gvar */,
                                          iterator, &(plan->axes_old_index_tag_map),
                                          shared_indices, shared_tuples,
                                          tuple_vars, /* OUT */
                                          &pool,
                                          is_composite_glyph))
        return false;
      glyph_variations.push (std::move (tuple_vars));
    }
    return !glyph_variations.in_error () && glyph_variations.length == plan->new_to_old_gid_list.length;
  }

  bool instantiate (const hb_subset_plan_t *plan)
  {
    unsigned count = plan->new_to_old_gid_list.length;
    bool iup_optimize = false;
    optimize_scratch_t scratch;
    iup_optimize = plan->flags & HB_SUBSET_FLAGS_OPTIMIZE_IUP_DELTAS;
    for (unsigned i = 0; i < count; i++)
    {
      hb_codepoint_t new_gid = plan->new_to_old_gid_list[i].first;
      contour_point_vector_t *all_points;
      if (!plan->new_gid_contour_points_map.has (new_gid, &all_points))
        return false;
      if (!glyph_variations[i].instantiate (plan->axes_location, plan->axes_triple_distances, scratch, &pool, all_points, iup_optimize))
        return false;
    }
    return true;
  }

  bool compile_bytes (const hb_map_t& axes_index_map,
                      const hb_map_t& axes_old_index_tag_map)
  {
    if (!compile_shared_tuples (axes_index_map, axes_old_index_tag_map))
      return false;
    for (tuple_variations_t& vars : glyph_variations)
      if (!vars.compile_bytes (axes_index_map, axes_old_index_tag_map,
                               true, /* use shared points */
                               true,
                               &shared_tuples_idx_map,
                               &pool))
        return false;

    return true;
  }

  bool compile_shared_tuples (const hb_map_t& axes_index_map,
                              const hb_map_t& axes_old_index_tag_map)
  {
    /* The key is a pointer to the compiled_peak_coords inside each tuple;
     * the hashing function always derefs pointers first. */
    hb_hashmap_t<const hb_vector_t<F2DOT14>*, unsigned> coords_count_map;

    /* count the number of shared coords */
    for (tuple_variations_t& vars : glyph_variations)
    {
      for (tuple_delta_t& var : vars.tuple_vars)
      {
        if (!var.compile_coords (axes_index_map, axes_old_index_tag_map, &pool))
          return false;
        unsigned *count;
        unsigned hash = hb_hash (&var.compiled_peak_coords);
        if (coords_count_map.has_with_hash (&(var.compiled_peak_coords), hash, &count))
          (*count)++;
        else
          coords_count_map.set_with_hash (&(var.compiled_peak_coords), hash, 1);
      }
    }

    if (!coords_count_map || coords_count_map.in_error ())
      return false;

    /* Collect only those coords that are used more than once, then sort. */
    hb_vector_t<hb_pair_t<const hb_vector_t<F2DOT14>*, unsigned>> shared_coords {
      + hb_iter (coords_count_map)
      | hb_filter ([] (const hb_pair_t<const hb_vector_t<F2DOT14>*, unsigned>& p) { return p.second > 1; })
    };
    if (unlikely (shared_coords.in_error ())) return false;

    /* no shared tuples: no coords are used more than once */
    if (!shared_coords) return true;
    /* Sort by coord frequency first (high to low), then by the coord bytes. */
    shared_coords.qsort (_cmp_coords);

    /* build the shared_coords -> idx map and the shared tuples byte array */

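    /* Tuple indices in a TupleVariationHeader are 12 bits, so at most
     * 0xFFF + 1 shared tuples can ever be referenced; hence the cap. */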
    shared_tuples_count = hb_min (0xFFFu + 1, shared_coords.length);
    unsigned len = shared_tuples_count * (shared_coords[0].first->length);
    if (unlikely (!compiled_shared_tuples.alloc (len)))
      return false;

    for (unsigned i = 0; i < shared_tuples_count; i++)
    {
      shared_tuples_idx_map.set (shared_coords[i].first, i);
      /* add a concat() in hb_vector_t? */
      for (auto c : shared_coords[i].first->iter ())
        compiled_shared_tuples.push (c);
    }

    return true;
  }

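  /* qsort comparator: higher use count sorts first; ties are broken by
   * comparing the coord bytes so the ordering is deterministic. */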
  static int _cmp_coords (const void *pa, const void *pb)
  {
    const hb_pair_t<hb_vector_t<F2DOT14> *, unsigned> *a = (const hb_pair_t<hb_vector_t<F2DOT14> *, unsigned> *) pa;
    const hb_pair_t<hb_vector_t<F2DOT14> *, unsigned> *b = (const hb_pair_t<hb_vector_t<F2DOT14> *, unsigned> *) pb;

    if (a->second != b->second)
      return b->second - a->second; // high to low

    return b->first->as_array ().cmp (a->first->as_array ());
  }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  bool serialize_glyph_var_data (hb_serialize_context_t *c,
                                 Iterator it,
                                 bool long_offset,
                                 unsigned num_glyphs,
                                 char* glyph_var_data_offsets /* OUT: glyph var data offsets array */) const
  {
    TRACE_SERIALIZE (this);

    if (long_offset)
    {
      ((HBUINT32 *) glyph_var_data_offsets)[0] = 0;
      glyph_var_data_offsets += 4;
    }
    else
    {
      ((HBUINT16 *) glyph_var_data_offsets)[0] = 0;
      glyph_var_data_offsets += 2;
    }
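    /* Offsets are cumulative: entry 0 is always 0, and entry gid + 1 records
     * the end of gid's data.  With short offsets the stored value is
     * offset / 2, which is why compiled data sizes are capped at
     * 2 * 0xFFFF bytes. */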
    unsigned glyph_offset = 0;
    hb_codepoint_t last_gid = 0;
    unsigned idx = 0;

    GlyphVariationData* cur_glyph = c->start_embed<GlyphVariationData> ();
    if (!cur_glyph) return_trace (false);
    for (auto &_ : it)
    {
      hb_codepoint_t gid = _.first;
      if (long_offset)
        for (; last_gid < gid; last_gid++)
          ((HBUINT32 *) glyph_var_data_offsets)[last_gid] = glyph_offset;
      else
        for (; last_gid < gid; last_gid++)
          ((HBUINT16 *) glyph_var_data_offsets)[last_gid] = glyph_offset / 2;

      if (idx >= glyph_variations.length) return_trace (false);
      if (!cur_glyph->serialize (c, true, glyph_variations[idx])) return_trace (false);
      GlyphVariationData* next_glyph = c->start_embed<GlyphVariationData> ();
      glyph_offset += (char *) next_glyph - (char *) cur_glyph;

      if (long_offset)
        ((HBUINT32 *) glyph_var_data_offsets)[gid] = glyph_offset;
      else
        ((HBUINT16 *) glyph_var_data_offsets)[gid] = glyph_offset / 2;

      last_gid++;
      idx++;
      cur_glyph = next_glyph;
    }

    if (long_offset)
      for (; last_gid < num_glyphs; last_gid++)
        ((HBUINT32 *) glyph_var_data_offsets)[last_gid] = glyph_offset;
    else
      for (; last_gid < num_glyphs; last_gid++)
        ((HBUINT16 *) glyph_var_data_offsets)[last_gid] = glyph_offset / 2;
    return_trace (true);
  }
};

template <typename GidOffsetType, unsigned TableTag>
struct gvar_GVAR
{
  static constexpr hb_tag_t tableTag = TableTag;

  using GlyphVariationData = TupleVariationData<GidOffsetType>;

  bool has_data () const { return version.to_int () != 0; }

  bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  hb_barrier () &&
                  (version.major == 1) &&
                  sharedTuples.sanitize (c, this, axisCount * sharedTupleCount) &&
                  (is_long_offset () ?
                     c->check_array (get_long_offset_array (), c->get_num_glyphs () + 1) :
                     c->check_array (get_short_offset_array (), c->get_num_glyphs () + 1)));
  }

  /* GlyphVariationData is not sanitized here; it must be checked when
   * accessing each glyph's variation data. */
  bool sanitize (hb_sanitize_context_t *c) const
  { return sanitize_shallow (c); }

  bool decompile_glyph_variations (hb_subset_context_t *c,
                                   glyph_variations_t<GidOffsetType>& glyph_vars /* OUT */) const
  {
    hb_hashmap_t<hb_codepoint_t, hb_bytes_t> new_gid_var_data_map;
    auto it = hb_iter (c->plan->new_to_old_gid_list);
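    /* Unless the subset plan keeps the .notdef outline, drop glyph 0's
     * variation data by mapping it to empty bytes. */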
    if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
    {
      new_gid_var_data_map.set (0, hb_bytes_t ());
      it++;
    }

    for (auto &_ : it)
    {
      hb_codepoint_t new_gid = _.first;
      hb_codepoint_t old_gid = _.second;
      hb_bytes_t var_data_bytes = get_glyph_var_data_bytes (c->source_blob, glyphCountX, old_gid);
      new_gid_var_data_map.set (new_gid, var_data_bytes);
    }

    if (new_gid_var_data_map.in_error ()) return false;

    hb_array_t<const F2DOT14> shared_tuples = (this+sharedTuples).as_array ((unsigned) sharedTupleCount * (unsigned) axisCount);
    return glyph_vars.create_from_glyphs_var_data (axisCount, shared_tuples, c->plan, new_gid_var_data_map);
  }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c,
                  const glyph_variations_t<GidOffsetType>& glyph_vars,
                  Iterator it,
                  unsigned axis_count,
                  unsigned num_glyphs,
                  bool force_long_offsets) const
  {
    TRACE_SERIALIZE (this);
    gvar_GVAR *out = c->allocate_min<gvar_GVAR> ();
    if (unlikely (!out)) return_trace (false);

    out->version.major = 1;
    out->version.minor = 0;
    out->axisCount = axis_count;
    out->glyphCountX = hb_min (0xFFFFu, num_glyphs);

    unsigned glyph_var_data_size = glyph_vars.compiled_byte_size ();
    /* According to the spec: if the short format (Offset16) is used for
     * offsets, the value stored is the offset divided by 2, so the maximum
     * data size is 2 * 0xFFFFu, which is 0x1FFFEu. */
    bool long_offset = glyph_var_data_size > 0x1FFFEu || force_long_offsets;
    out->flags = long_offset ? 1 : 0;

    HBUINT8 *glyph_var_data_offsets = c->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false);
    if (!glyph_var_data_offsets) return_trace (false);

    /* shared tuples */
    unsigned shared_tuple_count = glyph_vars.compiled_shared_tuples_count ();
    out->sharedTupleCount = shared_tuple_count;

    if (!shared_tuple_count)
      out->sharedTuples = 0;
    else
    {
      hb_array_t<const F2DOT14> shared_tuples = glyph_vars.compiled_shared_tuples.as_array ().copy (c);
      if (!shared_tuples.arrayZ) return_trace (false);
      out->sharedTuples = (const char *) shared_tuples.arrayZ - (char *) out;
    }

    char *glyph_var_data = c->start_embed<char> ();
    if (!glyph_var_data) return_trace (false);
    out->dataZ = glyph_var_data - (char *) out;

    return_trace (glyph_vars.serialize_glyph_var_data (c, it, long_offset, num_glyphs,
                                                       (char *) glyph_var_data_offsets));
  }

  bool instantiate (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    glyph_variations_t<GidOffsetType> glyph_vars;
    if (!decompile_glyph_variations (c, glyph_vars))
      return_trace (false);

    if (!glyph_vars.instantiate (c->plan)) return_trace (false);
    if (!glyph_vars.compile_bytes (c->plan->axes_index_map, c->plan->axes_old_index_tag_map))
      return_trace (false);

    unsigned axis_count = c->plan->axes_index_map.get_population ();
    unsigned num_glyphs = c->plan->num_output_glyphs ();
    auto it = hb_iter (c->plan->new_to_old_gid_list);

    bool force_long_offsets = false;
#ifdef HB_EXPERIMENTAL_API
    force_long_offsets = c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS;
#endif
    return_trace (serialize (c->serializer, glyph_vars, it, axis_count, num_glyphs, force_long_offsets));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
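    /* All axes pinned: no per-glyph variation remains, so signal that the
     * table should be dropped from the subset output. */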
    if (c->plan->all_axes_pinned)
      return_trace (false);

    if (c->plan->normalized_coords)
      return_trace (instantiate (c));

    unsigned glyph_count = version.to_int () ? c->plan->source->get_num_glyphs () : 0;

    gvar_GVAR *out = c->serializer->allocate_min<gvar_GVAR> ();
    if (unlikely (!out)) return_trace (false);

    out->version.major = 1;
    out->version.minor = 0;
    out->axisCount = axisCount;
    out->sharedTupleCount = sharedTupleCount;

    unsigned int num_glyphs = c->plan->num_output_glyphs ();
    out->glyphCountX = hb_min (0xFFFFu, num_glyphs);

    auto it = hb_iter (c->plan->new_to_old_gid_list);
    if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
      it++;
    unsigned int subset_data_size = 0;
    for (auto &_ : it)
    {
      hb_codepoint_t old_gid = _.second;
      subset_data_size += get_glyph_var_data_bytes (c->source_blob, glyph_count, old_gid).length;
    }

    /* According to the spec: if the short format (Offset16) is used for
     * offsets, the value stored is the offset divided by 2, so the maximum
     * data size is 2 * 0xFFFFu, which is 0x1FFFEu. */
    bool long_offset = subset_data_size > 0x1FFFEu;
#ifdef HB_EXPERIMENTAL_API
    long_offset = long_offset || (c->plan->flags & HB_SUBSET_FLAGS_IFTB_REQUIREMENTS);
#endif
    out->flags = long_offset ? 1 : 0;

    HBUINT8 *subset_offsets = c->serializer->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1), false);
    if (!subset_offsets) return_trace (false);

    /* shared tuples */
    if (!sharedTupleCount || !sharedTuples)
      out->sharedTuples = 0;
    else
    {
      unsigned int shared_tuple_size = F2DOT14::static_size * axisCount * sharedTupleCount;
      F2DOT14 *tuples = c->serializer->allocate_size<F2DOT14> (shared_tuple_size);
      if (!tuples) return_trace (false);
      out->sharedTuples = (char *) tuples - (char *) out;
      hb_memcpy (tuples, this+sharedTuples, shared_tuple_size);
    }

    /* This ordering relative to the shared tuples array, which puts the
     * glyphVariationData last in the table, is required when
     * HB_SUBSET_FLAGS_IFTB_REQUIREMENTS is set. */
    char *subset_data = c->serializer->allocate_size<char> (subset_data_size, false);
    if (!subset_data) return_trace (false);
    out->dataZ = subset_data - (char *) out;

    if (long_offset)
    {
      ((HBUINT32 *) subset_offsets)[0] = 0;
      subset_offsets += 4;
    }
    else
    {
      ((HBUINT16 *) subset_offsets)[0] = 0;
      subset_offsets += 2;
    }
    unsigned int glyph_offset = 0;

    hb_codepoint_t last = 0;
    it = hb_iter (c->plan->new_to_old_gid_list);
    if (it->first == 0 && !(c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
      it++;
    for (auto &_ : it)
    {
      hb_codepoint_t gid = _.first;
      hb_codepoint_t old_gid = _.second;

      if (long_offset)
        for (; last < gid; last++)
          ((HBUINT32 *) subset_offsets)[last] = glyph_offset;
      else
        for (; last < gid; last++)
          ((HBUINT16 *) subset_offsets)[last] = glyph_offset / 2;

      hb_bytes_t var_data_bytes = get_glyph_var_data_bytes (c->source_blob,
                                                            glyph_count,
                                                            old_gid);

      hb_memcpy (subset_data, var_data_bytes.arrayZ, var_data_bytes.length);
      subset_data += var_data_bytes.length;
      glyph_offset += var_data_bytes.length;

      if (long_offset)
        ((HBUINT32 *) subset_offsets)[gid] = glyph_offset;
      else
        ((HBUINT16 *) subset_offsets)[gid] = glyph_offset / 2;

      last++; // Skip over gid
    }

    if (long_offset)
      for (; last < num_glyphs; last++)
        ((HBUINT32 *) subset_offsets)[last] = glyph_offset;
    else
      for (; last < num_glyphs; last++)
        ((HBUINT16 *) subset_offsets)[last] = glyph_offset / 2;

    return_trace (true);
  }

  protected:
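  /* A glyph's variation data occupies the half-open byte range
   * [offset[glyph], offset[glyph + 1]) within the GlyphVariationData
   * array that dataZ points to. */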
  const hb_bytes_t get_glyph_var_data_bytes (hb_blob_t *blob,
                                             unsigned glyph_count,
                                             hb_codepoint_t glyph) const
  {
    unsigned start_offset = get_offset (glyph_count, glyph);
    unsigned end_offset = get_offset (glyph_count, glyph+1);
    if (unlikely (end_offset < start_offset)) return hb_bytes_t ();
    unsigned length = end_offset - start_offset;
    hb_bytes_t var_data = blob->as_bytes ().sub_array (((unsigned) dataZ) + start_offset, length);
    return likely (var_data.length >= GlyphVariationData::min_size) ? var_data : hb_bytes_t ();
  }

  bool is_long_offset () const { return flags & 1; }

  unsigned get_offset (unsigned glyph_count, unsigned i) const
  {
    if (unlikely (i > glyph_count)) return 0;
    hb_barrier ();
    return is_long_offset () ? get_long_offset_array ()[i] : get_short_offset_array ()[i] * 2;
  }

  const HBUINT32 *get_long_offset_array () const { return (const HBUINT32 *) &offsetZ; }
  const HBUINT16 *get_short_offset_array () const { return (const HBUINT16 *) &offsetZ; }

  public:
  struct accelerator_t
  {
    hb_scalar_cache_t *create_cache () const
    {
      return hb_scalar_cache_t::create (table->sharedTupleCount);
    }

    static void destroy_cache (hb_scalar_cache_t *cache)
    {
      hb_scalar_cache_t::destroy (cache);
    }

    bool has_data () const { return table->has_data (); }

    accelerator_t (hb_face_t *face)
    {
      table = hb_sanitize_context_t ().reference_table<gvar_GVAR> (face);
      /* If sanitize failed, set glyphCount to 0. */
      glyphCount = table->version.to_int () ? face->get_num_glyphs () : 0;
    }
    ~accelerator_t () { table.destroy (); }

    private:

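    /* IUP-style inference of a delta for an unreferenced point, as in the
     * spec: if the point's coordinate falls outside the range spanned by
     * the two reference points, take the delta of the reference point on
     * that side; otherwise interpolate linearly between the two reference
     * deltas. */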
    static float infer_delta (const hb_array_t<contour_point_t> points,
                              const hb_array_t<contour_point_t> deltas,
                              unsigned int target, unsigned int prev, unsigned int next,
                              float contour_point_t::*m)
    {
      float target_val = points.arrayZ[target].*m;
      float prev_val = points.arrayZ[prev].*m;
      float next_val = points.arrayZ[next].*m;
      float prev_delta = deltas.arrayZ[prev].*m;
      float next_delta = deltas.arrayZ[next].*m;

      if (prev_val == next_val)
        return (prev_delta == next_delta) ? prev_delta : 0.f;
      else if (target_val <= hb_min (prev_val, next_val))
        return (prev_val < next_val) ? prev_delta : next_delta;
      else if (target_val >= hb_max (prev_val, next_val))
        return (prev_val > next_val) ? prev_delta : next_delta;

      /* linear interpolation */
      float r = (target_val - prev_val) / (next_val - prev_val);
      return prev_delta + r * (next_delta - prev_delta);
    }

    static unsigned int next_index (unsigned int i, unsigned int start, unsigned int end)
    { return (i >= end) ? start : (i + 1); }

    public:
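    /* Apply this glyph's variation deltas for the given normalized coords
     * to the points array in place.  The last four entries are the phantom
     * points; with phantom_only set, only those are updated. */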
    bool apply_deltas_to_points (hb_codepoint_t glyph,
                                 hb_array_t<const int> coords,
                                 const hb_array_t<contour_point_t> points,
                                 hb_glyf_scratch_t &scratch,
                                 hb_scalar_cache_t *gvar_cache = nullptr,
                                 bool phantom_only = false) const
    {
      if (unlikely (glyph >= glyphCount)) return true;

      hb_bytes_t var_data_bytes = table->get_glyph_var_data_bytes (table.get_blob (), glyphCount, glyph);
      if (!var_data_bytes.as<GlyphVariationData> ()->has_data ()) return true;

      auto &shared_indices = scratch.shared_indices;
      shared_indices.clear ();

      typename GlyphVariationData::tuple_iterator_t iterator;
      if (!GlyphVariationData::get_tuple_iterator (var_data_bytes, table->axisCount,
                                                   var_data_bytes.arrayZ,
                                                   shared_indices, &iterator))
        return true; /* so that nothing is applied at all */

      /* Save original points for inferred delta calculation */
      auto &orig_points_vec = scratch.orig_points;
      orig_points_vec.clear (); // Populated lazily
      auto orig_points = orig_points_vec.as_array ();

      /* The flag field is used to mark referenced points. */
      auto &deltas_vec = scratch.deltas;
      deltas_vec.clear (); // Populated lazily
      auto deltas = deltas_vec.as_array ();

      unsigned num_coords = table->axisCount;
      hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * num_coords);

      auto &private_indices = scratch.private_indices;
      auto &x_deltas = scratch.x_deltas;
      auto &y_deltas = scratch.y_deltas;

      unsigned count = points.length;
      bool flush = false;
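      /* Deltas from successive tuples accumulate in the deltas buffer and
       * are flushed into the actual points lazily; a tuple with explicit
       * point indices first flushes and zeroes the buffer, so delta
       * inference operates on that tuple's deltas alone. */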
      do
      {
        float scalar = iterator.current_tuple->calculate_scalar (coords, num_coords, shared_tuples,
                                                                 gvar_cache);

        if (scalar == 0.f) continue;
        const HBUINT8 *p = iterator.get_serialized_data ();
        unsigned int length = iterator.current_tuple->get_data_size ();
        if (unlikely (!iterator.var_data_bytes.check_range (p, length)))
          return false;

        if (!deltas)
        {
          if (unlikely (!deltas_vec.resize_dirty (count))) return false;
          deltas = deltas_vec.as_array ();
          hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0,
                     (phantom_only ? 4 : count) * sizeof (deltas[0]));
        }

        const HBUINT8 *end = p + length;

        bool has_private_points = iterator.current_tuple->has_private_points ();
        if (has_private_points &&
            !GlyphVariationData::decompile_points (p, private_indices, end))
          return false;
        const hb_array_t<unsigned int> &indices = has_private_points ? private_indices : shared_indices;

        bool apply_to_all = (indices.length == 0);
        unsigned num_deltas = apply_to_all ? points.length : indices.length;
        unsigned start_deltas = (phantom_only && num_deltas >= 4 ? num_deltas - 4 : 0);
        if (unlikely (!x_deltas.resize_dirty (num_deltas))) return false;
        if (unlikely (!GlyphVariationData::decompile_deltas (p, x_deltas, end, false, start_deltas))) return false;
        if (unlikely (!y_deltas.resize_dirty (num_deltas))) return false;
        if (unlikely (!GlyphVariationData::decompile_deltas (p, y_deltas, end, false, start_deltas))) return false;

        if (!apply_to_all)
        {
          if (!orig_points && !phantom_only)
          {
            orig_points_vec.extend (points);
            if (unlikely (orig_points_vec.in_error ())) return false;
            orig_points = orig_points_vec.as_array ();
          }

          if (flush)
          {
            for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++)
              points.arrayZ[i].translate (deltas.arrayZ[i]);
            flush = false;
          }
          hb_memset (deltas.arrayZ + (phantom_only ? count - 4 : 0), 0,
                     (phantom_only ? 4 : count) * sizeof (deltas[0]));
        }

        if (HB_OPTIMIZE_SIZE_VAL)
        {
          for (unsigned int i = 0; i < num_deltas; i++)
          {
            unsigned int pt_index;
            if (apply_to_all)
              pt_index = i;
            else
            {
              pt_index = indices[i];
              if (unlikely (pt_index >= deltas.length)) continue;
            }
            if (phantom_only && pt_index < count - 4) continue;
            auto &delta = deltas.arrayZ[pt_index];
            delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
            delta.add_delta (x_deltas.arrayZ[i] * scalar,
                             y_deltas.arrayZ[i] * scalar);
          }
        }
        else
        {
          /* Ouch. Four cases... for optimization. */
          if (scalar != 1.0f)
          {
            if (apply_to_all)
              for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++)
              {
                auto &delta = deltas.arrayZ[i];
                delta.add_delta (x_deltas.arrayZ[i] * scalar,
                                 y_deltas.arrayZ[i] * scalar);
              }
            else
              for (unsigned int i = 0; i < num_deltas; i++)
              {
                unsigned int pt_index = indices[i];
                if (unlikely (pt_index >= deltas.length)) continue;
                if (phantom_only && pt_index < count - 4) continue;
                auto &delta = deltas.arrayZ[pt_index];
                delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
                delta.add_delta (x_deltas.arrayZ[i] * scalar,
                                 y_deltas.arrayZ[i] * scalar);
              }
          }
          else
          {
            if (apply_to_all)
              for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++)
              {
                auto &delta = deltas.arrayZ[i];
                delta.add_delta (x_deltas.arrayZ[i],
                                 y_deltas.arrayZ[i]);
              }
            else
              for (unsigned int i = 0; i < num_deltas; i++)
              {
                unsigned int pt_index = indices[i];
                if (unlikely (pt_index >= deltas.length)) continue;
                if (phantom_only && pt_index < count - 4) continue;
                auto &delta = deltas.arrayZ[pt_index];
                delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
                delta.add_delta (x_deltas.arrayZ[i],
                                 y_deltas.arrayZ[i]);
              }
          }
        }

        /* infer deltas for unreferenced points */
        if (!apply_to_all && !phantom_only)
        {
          unsigned start_point = 0;
          unsigned end_point = 0;
          while (true)
          {
            while (end_point < count && !points.arrayZ[end_point].is_end_point)
              end_point++;
            if (unlikely (end_point == count)) break;

            /* Count the unreferenced points in this contour.  If there are
             * no unreferenced points, or no referenced ones, there is
             * nothing to do. */
            unsigned unref_count = 0;
            for (unsigned i = start_point; i < end_point + 1; i++)
              unref_count += deltas.arrayZ[i].flag;
            unref_count = (end_point - start_point + 1) - unref_count;

            unsigned j = start_point;
            if (unref_count == 0 || unref_count > end_point - start_point)
              goto no_more_gaps;

            for (;;)
            {
              /* Locate the next gap of unreferenced points between two
               * referenced points prev and next.  Note that a gap may wrap
               * around at the left (start_point) and/or at the right
               * (end_point). */
              unsigned int prev, next, i;
              for (;;)
              {
                i = j;
                j = next_index (i, start_point, end_point);
                if (deltas.arrayZ[i].flag && !deltas.arrayZ[j].flag) break;
              }
              prev = j = i;
              for (;;)
              {
                i = j;
                j = next_index (i, start_point, end_point);
                if (!deltas.arrayZ[i].flag && deltas.arrayZ[j].flag) break;
              }
              next = j;
              /* Infer deltas for all unreferenced points in the gap between
               * prev and next. */
              i = prev;
              for (;;)
              {
                i = next_index (i, start_point, end_point);
                if (i == next) break;
                deltas.arrayZ[i].x = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::x);
                deltas.arrayZ[i].y = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::y);
                if (--unref_count == 0) goto no_more_gaps;
              }
            }
          no_more_gaps:
            start_point = end_point = end_point + 1;
          }
        }

        flush = true;

      } while (iterator.move_to_next ());

      if (flush)
      {
        for (unsigned int i = phantom_only ? count - 4 : 0; i < count; i++)
          points.arrayZ[i].translate (deltas.arrayZ[i]);
      }

      return true;
    }

    unsigned int get_axis_count () const { return table->axisCount; }

    private:
    hb_blob_ptr_t<gvar_GVAR> table;
    unsigned glyphCount;
  };

  protected:
  FixedVersion<> version;    /* Version number of the glyph variations table.
                              * Set to 0x00010000u. */
  HBUINT16 axisCount;        /* The number of variation axes for this font.  This
                              * must be the same number as axisCount in the 'fvar'
                              * table. */
  HBUINT16 sharedTupleCount; /* The number of shared tuple records.  Shared tuple
                              * records can be referenced within glyph variation
                              * data tables for multiple glyphs, as opposed to
                              * other tuple records stored directly within a glyph
                              * variation data table. */
  NNOffset32To<UnsizedArrayOf<F2DOT14>>
           sharedTuples;     /* Offset from the start of this table to the shared
                              * tuple records.  Array of tuple records shared
                              * across all glyph variation data tables. */
  GidOffsetType glyphCountX; /* The number of glyphs in this font.  This must
                              * match the number of glyphs stored elsewhere in
                              * the font. */
  HBUINT16 flags;            /* Bit-field that gives the format of the offset
                              * array that follows.  If bit 0 is clear, the
                              * offsets are uint16; if bit 0 is set, the offsets
                              * are uint32. */
  Offset32To<GlyphVariationData>
           dataZ;            /* Offset from the start of this table to the array
                              * of GlyphVariationData tables. */
  UnsizedArrayOf<HBUINT8>
           offsetZ;          /* Offsets from the start of the GlyphVariationData
                              * array to each GlyphVariationData table. */
  public:
  DEFINE_SIZE_ARRAY (20, offsetZ);
};

using gvar = gvar_GVAR<HBUINT16, HB_OT_TAG_gvar>;
using GVAR = gvar_GVAR<HBUINT24, HB_OT_TAG_GVAR>;

struct gvar_accelerator_t : gvar::accelerator_t {
  gvar_accelerator_t (hb_face_t *face) : gvar::accelerator_t (face) {}
};
struct GVAR_accelerator_t : GVAR::accelerator_t {
  GVAR_accelerator_t (hb_face_t *face) : GVAR::accelerator_t (face) {}
};

} /* namespace OT */

#endif /* HB_OT_VAR_GVAR_TABLE_HH */