/src/harfbuzz/src/hb-ot-var-gvar-table.hh
/*
 * Copyright © 2019 Adobe Inc.
 * Copyright © 2019 Ebrahim Byagowi
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Adobe Author(s): Michiharu Ariza
 */

#ifndef HB_OT_VAR_GVAR_TABLE_HH
#define HB_OT_VAR_GVAR_TABLE_HH

#include "hb-open-type.hh"

/*
 * gvar -- Glyph Variation Table
 * https://docs.microsoft.com/en-us/typography/opentype/spec/gvar
 */
#define HB_OT_TAG_gvar HB_TAG('g','v','a','r')

namespace OT {

struct contour_point_t
{
  void init (float x_ = 0.f, float y_ = 0.f, bool is_end_point_ = false)
  { flag = 0; x = x_; y = y_; is_end_point = is_end_point_; }

  void translate (const contour_point_t &p) { x += p.x; y += p.y; }

  float x = 0.f;
  float y = 0.f;
  uint8_t flag = 0; /* Reused to mark referenced points while applying gvar deltas. */
  bool is_end_point = false;
};

struct contour_point_vector_t : hb_vector_t<contour_point_t>
{
  void extend (const hb_array_t<contour_point_t> &a)
  {
    unsigned int old_len = length;
    if (unlikely (!resize (old_len + a.length, false)))
      return;
    auto arrayZ = this->arrayZ + old_len;
    unsigned count = a.length;
    hb_memcpy (arrayZ, a.arrayZ, count * sizeof (arrayZ[0]));
  }

  void transform (const float (&matrix)[4])
  {
    if (matrix[0] == 1.f && matrix[1] == 0.f &&
        matrix[2] == 0.f && matrix[3] == 1.f)
      return;
    auto arrayZ = this->arrayZ;
    unsigned count = length;
    for (unsigned i = 0; i < count; i++)
    {
      contour_point_t &p = arrayZ[i];
      float x_ = p.x * matrix[0] + p.y * matrix[2];
      p.y = p.x * matrix[1] + p.y * matrix[3];
      p.x = x_;
    }
  }

  void translate (const contour_point_t& delta)
  {
    if (delta.x == 0.f && delta.y == 0.f)
      return;
    auto arrayZ = this->arrayZ;
    unsigned count = length;
    for (unsigned i = 0; i < count; i++)
      arrayZ[i].translate (delta);
  }
};
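
/* A minimal usage sketch (hypothetical values): the 2x2 matrix layout expected
 * by transform() above is {xx, yx, xy, yy}, i.e.
 *   x' = x*m[0] + y*m[2],  y' = x*m[1] + y*m[3].
 *
 *   contour_point_vector_t points;
 *   ...
 *   const float rotate_90[4] = {0.f, 1.f, -1.f, 0.f};
 *   points.transform (rotate_90);
 */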

/* https://docs.microsoft.com/en-us/typography/opentype/spec/otvarcommonformats#tuplevariationheader */
struct TupleVariationHeader
{
  unsigned get_size (unsigned axis_count) const
  { return min_size + get_all_tuples (axis_count).get_size (); }

  unsigned get_data_size () const { return varDataSize; }

  const TupleVariationHeader &get_next (unsigned axis_count) const
  { return StructAtOffset<TupleVariationHeader> (this, get_size (axis_count)); }

  float calculate_scalar (hb_array_t<int> coords, unsigned int coord_count,
                          const hb_array_t<const F2DOT14> shared_tuples) const
  {
    hb_array_t<const F2DOT14> peak_tuple;

    if (has_peak ())
      peak_tuple = get_peak_tuple (coord_count);
    else
    {
      unsigned int index = get_index ();
      if (unlikely (index * coord_count >= shared_tuples.length))
        return 0.f;
      peak_tuple = shared_tuples.sub_array (coord_count * index, coord_count);
    }

    hb_array_t<const F2DOT14> start_tuple;
    hb_array_t<const F2DOT14> end_tuple;
    if (has_intermediate ())
    {
      start_tuple = get_start_tuple (coord_count);
      end_tuple = get_end_tuple (coord_count);
    }

    float scalar = 1.f;
    for (unsigned int i = 0; i < coord_count; i++)
    {
      int v = coords[i];
      int peak = peak_tuple[i].to_int ();
      if (!peak || v == peak) continue;

      if (has_intermediate ())
      {
        int start = start_tuple[i].to_int ();
        int end = end_tuple[i].to_int ();
        if (unlikely (start > peak || peak > end ||
                      (start < 0 && end > 0 && peak))) continue;
        if (v < start || v > end) return 0.f;
        if (v < peak)
        { if (peak != start) scalar *= (float) (v - start) / (peak - start); }
        else
        { if (peak != end) scalar *= (float) (end - v) / (end - peak); }
      }
      else if (!v || v < hb_min (0, peak) || v > hb_max (0, peak)) return 0.f;
      else
        scalar *= (float) v / peak;
    }
    return scalar;
  }
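
  /* Summary of the per-axis factor computed above (documentation only, no new
   * behavior):
   *   peak == 0, or v == peak                          -> factor 1
   *   no intermediate region:
   *     v == 0, or v outside [min(0,peak), max(0,peak)] -> whole scalar is 0
   *     otherwise                                       -> factor v / peak
   *   intermediate region [start, end] (skipped as factor 1 if invalid):
   *     v < start or v > end                            -> whole scalar is 0
   *     v < peak                                        -> factor (v - start) / (peak - start)
   *     v > peak                                        -> factor (end - v) / (end - peak)
   * The returned scalar is the product of the per-axis factors. */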

  bool has_peak () const { return tupleIndex & TuppleIndex::EmbeddedPeakTuple; }
  bool has_intermediate () const { return tupleIndex & TuppleIndex::IntermediateRegion; }
  bool has_private_points () const { return tupleIndex & TuppleIndex::PrivatePointNumbers; }
  unsigned get_index () const { return tupleIndex & TuppleIndex::TupleIndexMask; }

  protected:
  struct TuppleIndex : HBUINT16
  {
    enum Flags {
      EmbeddedPeakTuple   = 0x8000u,
      IntermediateRegion  = 0x4000u,
      PrivatePointNumbers = 0x2000u,
      TupleIndexMask      = 0x0FFFu
    };

    DEFINE_SIZE_STATIC (2);
  };

  hb_array_t<const F2DOT14> get_all_tuples (unsigned axis_count) const
  { return StructAfter<UnsizedArrayOf<F2DOT14>> (tupleIndex).as_array ((has_peak () + has_intermediate () * 2) * axis_count); }
  hb_array_t<const F2DOT14> get_peak_tuple (unsigned axis_count) const
  { return get_all_tuples (axis_count).sub_array (0, axis_count); }
  hb_array_t<const F2DOT14> get_start_tuple (unsigned axis_count) const
  { return get_all_tuples (axis_count).sub_array (has_peak () * axis_count, axis_count); }
  hb_array_t<const F2DOT14> get_end_tuple (unsigned axis_count) const
  { return get_all_tuples (axis_count).sub_array (has_peak () * axis_count + axis_count, axis_count); }

  HBUINT16      varDataSize;    /* The size in bytes of the serialized
                                 * data for this tuple variation table. */
  TuppleIndex   tupleIndex;     /* A packed field. The high 4 bits are flags (see
                                 * TuppleIndex::Flags above). The low 12 bits are an
                                 * index into a shared tuple records array. */
  /* UnsizedArrayOf<F2DOT14> peakTuple - optional */
                                /* Peak tuple record for this tuple variation table — optional,
                                 * determined by flags in the tupleIndex value.
                                 *
                                 * Note that this must always be included in the 'cvar' table. */
  /* UnsizedArrayOf<F2DOT14> intermediateStartTuple - optional */
                                /* Intermediate start tuple record for this tuple variation table — optional,
                                 * determined by flags in the tupleIndex value. */
  /* UnsizedArrayOf<F2DOT14> intermediateEndTuple - optional */
                                /* Intermediate end tuple record for this tuple variation table — optional,
                                 * determined by flags in the tupleIndex value. */
  public:
  DEFINE_SIZE_MIN (4);
};

struct GlyphVariationData
{
  const TupleVariationHeader &get_tuple_var_header (void) const
  { return StructAfter<TupleVariationHeader> (data); }

  struct tuple_iterator_t
  {
    void init (hb_bytes_t var_data_bytes_, unsigned int axis_count_)
    {
      var_data_bytes = var_data_bytes_;
      var_data = var_data_bytes_.as<GlyphVariationData> ();
      index = 0;
      axis_count = axis_count_;
      current_tuple = &var_data->get_tuple_var_header ();
      data_offset = 0;
    }

    bool get_shared_indices (hb_vector_t<unsigned int> &shared_indices /* OUT */)
    {
      if (var_data->has_shared_point_numbers ())
      {
        const HBUINT8 *base = &(var_data+var_data->data);
        const HBUINT8 *p = base;
        if (!unpack_points (p, shared_indices, (const HBUINT8 *) (var_data_bytes.arrayZ + var_data_bytes.length))) return false;
        data_offset = p - base;
      }
      return true;
    }

    bool is_valid () const
    {
      return (index < var_data->tupleVarCount.get_count ()) &&
             var_data_bytes.check_range (current_tuple, TupleVariationHeader::min_size) &&
             var_data_bytes.check_range (current_tuple, hb_max (current_tuple->get_data_size (),
                                                                current_tuple->get_size (axis_count)));
    }

    bool move_to_next ()
    {
      data_offset += current_tuple->get_data_size ();
      current_tuple = &current_tuple->get_next (axis_count);
      index++;
      return is_valid ();
    }

    const HBUINT8 *get_serialized_data () const
    { return &(var_data+var_data->data) + data_offset; }

    private:
    const GlyphVariationData *var_data;
    unsigned int index;
    unsigned int axis_count;
    unsigned int data_offset;

    public:
    hb_bytes_t var_data_bytes;
    const TupleVariationHeader *current_tuple;
  };

  static bool get_tuple_iterator (hb_bytes_t var_data_bytes, unsigned axis_count,
                                  hb_vector_t<unsigned int> &shared_indices /* OUT */,
                                  tuple_iterator_t *iterator /* OUT */)
  {
    iterator->init (var_data_bytes, axis_count);
    if (!iterator->get_shared_indices (shared_indices))
      return false;
    return iterator->is_valid ();
  }
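
  /* Typical iteration pattern (a sketch; mirrors apply_deltas_to_points below):
   *
   *   hb_vector_t<unsigned int> shared_indices;
   *   GlyphVariationData::tuple_iterator_t iterator;
   *   if (GlyphVariationData::get_tuple_iterator (bytes, axis_count,
   *                                               shared_indices, &iterator))
   *     do { ...use iterator.current_tuple and get_serialized_data ()... }
   *     while (iterator.move_to_next ());
   */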

  bool has_shared_point_numbers () const { return tupleVarCount.has_shared_point_numbers (); }

  static bool unpack_points (const HBUINT8 *&p /* IN/OUT */,
                             hb_vector_t<unsigned int> &points /* OUT */,
                             const HBUINT8 *end)
  {
    enum packed_point_flag_t
    {
      POINTS_ARE_WORDS     = 0x80,
      POINT_RUN_COUNT_MASK = 0x7F
    };

    if (unlikely (p + 1 > end)) return false;

    unsigned count = *p++;
    if (count & POINTS_ARE_WORDS)
    {
      if (unlikely (p + 1 > end)) return false;
      count = ((count & POINT_RUN_COUNT_MASK) << 8) | *p++;
    }
    if (unlikely (!points.resize (count, false))) return false;

    unsigned n = 0;
    unsigned i = 0;
    while (i < count)
    {
      if (unlikely (p + 1 > end)) return false;
      unsigned control = *p++;
      unsigned run_count = (control & POINT_RUN_COUNT_MASK) + 1;
      if (unlikely (i + run_count > count)) return false;
      unsigned j;
      if (control & POINTS_ARE_WORDS)
      {
        if (unlikely (p + run_count * HBUINT16::static_size > end)) return false;
        for (j = 0; j < run_count; j++, i++)
        {
          n += *(const HBUINT16 *)p;
          points.arrayZ[i] = n;
          p += HBUINT16::static_size;
        }
      }
      else
      {
        if (unlikely (p + run_count > end)) return false;
        for (j = 0; j < run_count; j++, i++)
        {
          n += *p++;
          points.arrayZ[i] = n;
        }
      }
    }
    return true;
  }
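
  /* Worked example of the packed point-number encoding decoded above
   * (hypothetical bytes):
   *   input: 03 02 01 02 03
   *   03       -> point count = 3 (POINTS_ARE_WORDS clear)
   *   02       -> control: byte-sized run of (0x02 & 0x7F) + 1 = 3 deltas
   *   01 02 03 -> cumulative point numbers 1, 3, 6
   * A leading count of 0 yields an empty array, which callers treat as
   * "apply to all points". */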

  static bool unpack_deltas (const HBUINT8 *&p /* IN/OUT */,
                             hb_vector_t<int> &deltas /* IN/OUT */,
                             const HBUINT8 *end)
  {
    enum packed_delta_flag_t
    {
      DELTAS_ARE_ZERO      = 0x80,
      DELTAS_ARE_WORDS     = 0x40,
      DELTA_RUN_COUNT_MASK = 0x3F
    };

    unsigned i = 0;
    unsigned count = deltas.length;
    while (i < count)
    {
      if (unlikely (p + 1 > end)) return false;
      unsigned control = *p++;
      unsigned run_count = (control & DELTA_RUN_COUNT_MASK) + 1;
      if (unlikely (i + run_count > count)) return false;
      unsigned j;
      if (control & DELTAS_ARE_ZERO)
      {
        for (j = 0; j < run_count; j++, i++)
          deltas.arrayZ[i] = 0;
      }
      else if (control & DELTAS_ARE_WORDS)
      {
        if (unlikely (p + run_count * HBUINT16::static_size > end)) return false;
        for (j = 0; j < run_count; j++, i++)
        {
          deltas.arrayZ[i] = * (const HBINT16 *) p;
          p += HBUINT16::static_size;
        }
      }
      else
      {
        if (unlikely (p + run_count > end)) return false;
        for (j = 0; j < run_count; j++, i++)
        {
          deltas.arrayZ[i] = * (const HBINT8 *) p++;
        }
      }
    }
    return true;
  }
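
  /* Worked example of the packed delta encoding decoded above (hypothetical
   * bytes, for a deltas vector of length 6):
   *   input: 82 40 FF 18 01 05 FB
   *   82       -> DELTAS_ARE_ZERO run of (0x02 & 0x3F) + 1 = 3 zeros
   *   40 FF 18 -> DELTAS_ARE_WORDS run of 1: int16 0xFF18 = -232
   *   01 05 FB -> byte-sized run of 2: 5, -5
   *   result: 0, 0, 0, -232, 5, -5
   * Unlike packed point numbers, deltas are not cumulative. */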

  bool has_data () const { return tupleVarCount; }

  protected:
  struct TupleVarCount : HBUINT16
  {
    bool has_shared_point_numbers () const { return ((*this) & SharedPointNumbers); }
    unsigned int get_count () const { return (*this) & CountMask; }

    protected:
    enum Flags
    {
      SharedPointNumbers = 0x8000u,
      CountMask          = 0x0FFFu
    };
    public:
    DEFINE_SIZE_STATIC (2);
  };

  TupleVarCount tupleVarCount;  /* A packed field. The high 4 bits are flags, and the
                                 * low 12 bits are the number of tuple variation tables
                                 * for this glyph. The number of tuple variation tables
                                 * can be any number between 1 and 4095. */
  Offset16To<HBUINT8>
                data;           /* Offset from the start of the GlyphVariationData table
                                 * to the serialized data. */
  /* TupleVariationHeader tupleVariationHeaders[] *//* Array of tuple variation headers. */
  public:
  DEFINE_SIZE_MIN (4);
};
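
/* On-disk layout of a GlyphVariationData table, as parsed above:
 *   TupleVarCount        tupleVarCount
 *   Offset16To<HBUINT8>  data      -> start of the serialized region
 *   TupleVariationHeader headers[] (tupleVarCount & CountMask of them)
 * The serialized region holds the optional shared point numbers, followed by
 * each tuple's private point numbers (if flagged) and packed x/y deltas. */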

struct gvar
{
  static constexpr hb_tag_t tableTag = HB_OT_TAG_gvar;

  bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && (version.major == 1) &&
                  sharedTuples.sanitize (c, this, axisCount * sharedTupleCount) &&
                  (is_long_offset () ?
                     c->check_array (get_long_offset_array (), glyphCount+1) :
                     c->check_array (get_short_offset_array (), glyphCount+1)));
  }

  /* GlyphVariationData not sanitized here; must be checked while accessing each glyph variation data */
  bool sanitize (hb_sanitize_context_t *c) const
  { return sanitize_shallow (c); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    gvar *out = c->serializer->allocate_min<gvar> ();
    if (unlikely (!out)) return_trace (false);

    out->version.major = 1;
    out->version.minor = 0;
    out->axisCount = axisCount;
    out->sharedTupleCount = sharedTupleCount;

    unsigned int num_glyphs = c->plan->num_output_glyphs ();
    out->glyphCount = num_glyphs;

    unsigned int subset_data_size = 0;
    for (hb_codepoint_t gid = (c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE) ? 0 : 1;
         gid < num_glyphs;
         gid++)
    {
      hb_codepoint_t old_gid;
      if (!c->plan->old_gid_for_new_gid (gid, &old_gid)) continue;
      subset_data_size += get_glyph_var_data_bytes (c->source_blob, old_gid).length;
    }

    bool long_offset = subset_data_size & ~0xFFFFu;
    out->flags = long_offset ? 1 : 0;

    HBUINT8 *subset_offsets = c->serializer->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1));
    if (!subset_offsets) return_trace (false);

    /* shared tuples */
    if (!sharedTupleCount || !sharedTuples)
      out->sharedTuples = 0;
    else
    {
      unsigned int shared_tuple_size = F2DOT14::static_size * axisCount * sharedTupleCount;
      F2DOT14 *tuples = c->serializer->allocate_size<F2DOT14> (shared_tuple_size);
      if (!tuples) return_trace (false);
      out->sharedTuples = (char *) tuples - (char *) out;
      hb_memcpy (tuples, this+sharedTuples, shared_tuple_size);
    }

    char *subset_data = c->serializer->allocate_size<char> (subset_data_size);
    if (!subset_data) return_trace (false);
    out->dataZ = subset_data - (char *) out;

    unsigned int glyph_offset = 0;
    for (hb_codepoint_t gid = (c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE) ? 0 : 1;
         gid < num_glyphs;
         gid++)
    {
      hb_codepoint_t old_gid;
      hb_bytes_t var_data_bytes = c->plan->old_gid_for_new_gid (gid, &old_gid)
                                ? get_glyph_var_data_bytes (c->source_blob, old_gid)
                                : hb_bytes_t ();

      if (long_offset)
        ((HBUINT32 *) subset_offsets)[gid] = glyph_offset;
      else
        ((HBUINT16 *) subset_offsets)[gid] = glyph_offset / 2;

      if (var_data_bytes.length > 0)
        hb_memcpy (subset_data, var_data_bytes.arrayZ, var_data_bytes.length);
      subset_data += var_data_bytes.length;
      glyph_offset += var_data_bytes.length;
    }
    if (long_offset)
      ((HBUINT32 *) subset_offsets)[num_glyphs] = glyph_offset;
    else
      ((HBUINT16 *) subset_offsets)[num_glyphs] = glyph_offset / 2;

    return_trace (true);
  }

  protected:
  const hb_bytes_t get_glyph_var_data_bytes (hb_blob_t *blob, hb_codepoint_t glyph) const
  {
    unsigned start_offset = get_offset (glyph);
    unsigned end_offset = get_offset (glyph+1);
    if (unlikely (end_offset < start_offset)) return hb_bytes_t ();
    unsigned length = end_offset - start_offset;
    hb_bytes_t var_data = blob->as_bytes ().sub_array (((unsigned) dataZ) + start_offset, length);
    return likely (var_data.length >= GlyphVariationData::min_size) ? var_data : hb_bytes_t ();
  }

  bool is_long_offset () const { return flags & 1; }

  unsigned get_offset (unsigned i) const
  {
    if (unlikely (i > glyphCount)) return 0;
    _hb_compiler_memory_r_barrier ();
    return is_long_offset () ? get_long_offset_array ()[i] : get_short_offset_array ()[i] * 2;
  }

  const HBUINT32 *get_long_offset_array () const { return (const HBUINT32 *) &offsetZ; }
  const HBUINT16 *get_short_offset_array () const { return (const HBUINT16 *) &offsetZ; }

  public:
  struct accelerator_t
  {
    accelerator_t (hb_face_t *face)
    { table = hb_sanitize_context_t ().reference_table<gvar> (face); }
    ~accelerator_t () { table.destroy (); }

    private:

    static float infer_delta (const hb_array_t<contour_point_t> points,
                              const hb_array_t<contour_point_t> deltas,
                              unsigned int target, unsigned int prev, unsigned int next,
                              float contour_point_t::*m)
    {
      float target_val = points.arrayZ[target].*m;
      float prev_val = points.arrayZ[prev].*m;
      float next_val = points.arrayZ[next].*m;
      float prev_delta = deltas.arrayZ[prev].*m;
      float next_delta = deltas.arrayZ[next].*m;

      if (prev_val == next_val)
        return (prev_delta == next_delta) ? prev_delta : 0.f;
      else if (target_val <= hb_min (prev_val, next_val))
        return (prev_val < next_val) ? prev_delta : next_delta;
      else if (target_val >= hb_max (prev_val, next_val))
        return (prev_val > next_val) ? prev_delta : next_delta;

      /* linear interpolation */
      float r = (target_val - prev_val) / (next_val - prev_val);
      return prev_delta + r * (next_delta - prev_delta);
    }
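
    /* infer_delta implements the inferred-deltas rule for untouched points:
     * if the target coordinate sits at or beyond both reference neighbors,
     * it inherits the nearer neighbor's delta; if it sits strictly between
     * them, the delta is linearly interpolated. E.g. with prev_val=0,
     * next_val=10, target_val=4, prev_delta=0, next_delta=5, the result is
     * 0 + (4-0)/(10-0) * (5-0) = 2. */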

    static unsigned int next_index (unsigned int i, unsigned int start, unsigned int end)
    { return (i >= end) ? start : (i + 1); }

    public:
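    /* Applies this glyph's variation deltas, scaled per `coords`, to `points`
     * in place. Returns true when there is nothing applicable (including some
     * malformed data, which is silently skipped); returns false on allocation
     * failure or when per-tuple data fails to parse. */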
    bool apply_deltas_to_points (hb_codepoint_t glyph,
                                 hb_array_t<int> coords,
                                 const hb_array_t<contour_point_t> points) const
    {
      if (!coords) return true;

      if (unlikely (glyph >= table->glyphCount)) return true;

      hb_bytes_t var_data_bytes = table->get_glyph_var_data_bytes (table.get_blob (), glyph);
      if (!var_data_bytes.as<GlyphVariationData> ()->has_data ()) return true;
      hb_vector_t<unsigned int> shared_indices;
      GlyphVariationData::tuple_iterator_t iterator;
      if (!GlyphVariationData::get_tuple_iterator (var_data_bytes, table->axisCount,
                                                   shared_indices, &iterator))
        return true; /* malformed data; variations aren't applied at all */

      /* Save original points for inferred delta calculation */
      contour_point_vector_t orig_points_vec;
      orig_points_vec.extend (points);
      if (unlikely (orig_points_vec.in_error ())) return false;
      auto orig_points = orig_points_vec.as_array ();

      contour_point_vector_t deltas_vec; /* flag is used to indicate referenced point */
      if (unlikely (!deltas_vec.resize (points.length, false))) return false;
      auto deltas = deltas_vec.as_array ();

      hb_vector_t<unsigned> end_points;
      for (unsigned i = 0; i < points.length; ++i)
        if (points.arrayZ[i].is_end_point)
          end_points.push (i);

      unsigned num_coords = table->axisCount;
      hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * table->axisCount);

      hb_vector_t<unsigned int> private_indices;
      hb_vector_t<int> x_deltas;
      hb_vector_t<int> y_deltas;
      do
      {
        float scalar = iterator.current_tuple->calculate_scalar (coords, num_coords, shared_tuples);
        if (scalar == 0.f) continue;
        const HBUINT8 *p = iterator.get_serialized_data ();
        unsigned int length = iterator.current_tuple->get_data_size ();
        if (unlikely (!iterator.var_data_bytes.check_range (p, length)))
          return false;

        const HBUINT8 *end = p + length;

        bool has_private_points = iterator.current_tuple->has_private_points ();
        if (has_private_points &&
            !GlyphVariationData::unpack_points (p, private_indices, end))
          return false;
        const hb_array_t<unsigned int> &indices = has_private_points ? private_indices : shared_indices;

        bool apply_to_all = (indices.length == 0);
        unsigned int num_deltas = apply_to_all ? points.length : indices.length;
        if (unlikely (!x_deltas.resize (num_deltas, false))) return false;
        if (unlikely (!GlyphVariationData::unpack_deltas (p, x_deltas, end))) return false;
        if (unlikely (!y_deltas.resize (num_deltas, false))) return false;
        if (unlikely (!GlyphVariationData::unpack_deltas (p, y_deltas, end))) return false;

        hb_memset (deltas.arrayZ, 0, deltas.get_size ());

        unsigned ref_points = 0;
        if (scalar != 1.0f)
          for (unsigned int i = 0; i < num_deltas; i++)
          {
            unsigned int pt_index = apply_to_all ? i : indices[i];
            if (unlikely (pt_index >= deltas.length)) continue;
            auto &delta = deltas.arrayZ[pt_index];
            ref_points += !delta.flag;
            delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
            delta.x += x_deltas.arrayZ[i] * scalar;
            delta.y += y_deltas.arrayZ[i] * scalar;
          }
        else
          for (unsigned int i = 0; i < num_deltas; i++)
          {
            unsigned int pt_index = apply_to_all ? i : indices[i];
            if (unlikely (pt_index >= deltas.length)) continue;
            auto &delta = deltas.arrayZ[pt_index];
            ref_points += !delta.flag;
            delta.flag = 1; /* this point is referenced, i.e., explicit deltas specified */
            delta.x += x_deltas.arrayZ[i];
            delta.y += y_deltas.arrayZ[i];
          }

        /* infer deltas for unreferenced points */
        if (ref_points && ref_points < orig_points.length)
        {
          unsigned start_point = 0;
          for (unsigned c = 0; c < end_points.length; c++)
          {
            unsigned end_point = end_points.arrayZ[c];

            /* Count the unreferenced points in this contour. If the contour has
             * no unreferenced points or no referenced points, nothing to do. */
            unsigned unref_count = 0;
            for (unsigned i = start_point; i < end_point + 1; i++)
              unref_count += deltas.arrayZ[i].flag;
            unref_count = (end_point - start_point + 1) - unref_count;

            unsigned j = start_point;
            if (unref_count == 0 || unref_count > end_point - start_point)
              goto no_more_gaps;

            for (;;)
            {
              /* Locate the next gap of unreferenced points between two referenced points prev and next.
               * Note that a gap may wrap around at left (start_point) and/or at right (end_point).
               */
              unsigned int prev, next, i;
              for (;;)
              {
                i = j;
                j = next_index (i, start_point, end_point);
                if (deltas.arrayZ[i].flag && !deltas.arrayZ[j].flag) break;
              }
              prev = j = i;
              for (;;)
              {
                i = j;
                j = next_index (i, start_point, end_point);
                if (!deltas.arrayZ[i].flag && deltas.arrayZ[j].flag) break;
              }
              next = j;
              /* Infer deltas for all unref points in the gap between prev and next */
              i = prev;
              for (;;)
              {
                i = next_index (i, start_point, end_point);
                if (i == next) break;
                deltas.arrayZ[i].x = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::x);
                deltas.arrayZ[i].y = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::y);
                if (--unref_count == 0) goto no_more_gaps;
              }
            }
            no_more_gaps:
            start_point = end_point + 1;
          }
        }

        /* apply specified / inferred deltas to points */
        for (unsigned int i = 0; i < points.length; i++)
        {
          points.arrayZ[i].x += deltas.arrayZ[i].x;
          points.arrayZ[i].y += deltas.arrayZ[i].y;
        }
      } while (iterator.move_to_next ());

      return true;
    }

    unsigned int get_axis_count () const { return table->axisCount; }

    private:
    hb_blob_ptr_t<gvar> table;
  };

  protected:
  FixedVersion<> version;       /* Version number of the glyph variations table.
                                 * Set to 0x00010000u. */
  HBUINT16      axisCount;      /* The number of variation axes for this font. This must be
                                 * the same number as axisCount in the 'fvar' table. */
  HBUINT16      sharedTupleCount;
                                /* The number of shared tuple records. Shared tuple records
                                 * can be referenced within glyph variation data tables for
                                 * multiple glyphs, as opposed to other tuple records stored
                                 * directly within a glyph variation data table. */
  NNOffset32To<UnsizedArrayOf<F2DOT14>>
                sharedTuples;   /* Offset from the start of this table to the shared tuple records.
                                 * Array of tuple records shared across all glyph variation data tables. */
  HBUINT16      glyphCount;     /* The number of glyphs in this font. This must match the number of
                                 * glyphs stored elsewhere in the font. */
  HBUINT16      flags;          /* Bit-field that gives the format of the offset array that follows.
                                 * If bit 0 is clear, the offsets are uint16; if bit 0 is set, the
                                 * offsets are uint32. */
  Offset32To<GlyphVariationData>
                dataZ;          /* Offset from the start of this table to the array of
                                 * GlyphVariationData tables. */
  UnsizedArrayOf<HBUINT8>
                offsetZ;        /* Offsets from the start of the GlyphVariationData array
                                 * to each GlyphVariationData table. */
  public:
  DEFINE_SIZE_ARRAY (20, offsetZ);
};
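
/* Usage sketch (hypothetical call site; in HarfBuzz the real callers live in
 * the glyf outline code):
 *
 *   gvar::accelerator_t gvar_accel (face);
 *   // `all_points` filled from glyf; `coords` are the font's normalized
 *   // variation coordinates (2.14 values as ints):
 *   gvar_accel.apply_deltas_to_points (gid, coords, all_points.as_array ());
 */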

struct gvar_accelerator_t : gvar::accelerator_t {
  gvar_accelerator_t (hb_face_t *face) : gvar::accelerator_t (face) {}
};

} /* namespace OT */

#endif /* HB_OT_VAR_GVAR_TABLE_HH */