/src/skia/src/utils/SkPolyUtils.cpp
Line | Count | Source (jump to first uncovered line) |
1 | | /* |
2 | | * Copyright 2017 Google Inc. |
3 | | * |
4 | | * Use of this source code is governed by a BSD-style license that can be |
5 | | * found in the LICENSE file. |
6 | | */ |
7 | | |
8 | | #include "src/utils/SkPolyUtils.h" |
9 | | |
10 | | #include <limits> |
11 | | |
12 | | #include "include/private/SkNx.h" |
13 | | #include "include/private/SkTArray.h" |
14 | | #include "include/private/SkTemplates.h" |
15 | | #include "src/core/SkPointPriv.h" |
16 | | #include "src/core/SkTDPQueue.h" |
17 | | #include "src/core/SkTInternalLList.h" |
18 | | |
19 | | ////////////////////////////////////////////////////////////////////////////////// |
20 | | // Helper data structures and functions |
21 | | |
22 | | struct OffsetSegment { |
23 | | SkPoint fP0; |
24 | | SkVector fV; |
25 | | }; |
26 | | |
27 | | constexpr SkScalar kCrossTolerance = SK_ScalarNearlyZero * SK_ScalarNearlyZero; |
28 | | |
29 | | // Computes perpDot for point p compared to segment defined by origin p0 and vector v. |
30 | | // A positive value means the point is to the left of the segment, |
31 | | // negative is to the right, 0 is collinear. |
32 | 456k | static int compute_side(const SkPoint& p0, const SkVector& v, const SkPoint& p) { |
33 | 456k | SkVector w = p - p0; |
34 | 456k | SkScalar perpDot = v.cross(w); |
35 | 456k | if (!SkScalarNearlyZero(perpDot, kCrossTolerance)) { |
36 | 198k | return ((perpDot > 0) ? 1 : -1); |
37 | 395k | } |
38 | | |
39 | 60.8k | return 0; |
40 | 60.8k | } |
41 | | |
42 | | // Returns 1 for cw, -1 for ccw and 0 if zero signed area (either degenerate or self-intersecting) |
43 | 2.79k | int SkGetPolygonWinding(const SkPoint* polygonVerts, int polygonSize) { |
44 | 2.79k | if (polygonSize < 3) { |
45 | 74 | return 0; |
46 | 74 | } |
47 | | |
48 | | // compute area and use sign to determine winding |
49 | 2.71k | SkScalar quadArea = 0; |
50 | 2.71k | SkVector v0 = polygonVerts[1] - polygonVerts[0]; |
51 | 1.37M | for (int curr = 2; curr < polygonSize; ++curr) { |
52 | 1.36M | SkVector v1 = polygonVerts[curr] - polygonVerts[0]; |
53 | 1.36M | quadArea += v0.cross(v1); |
54 | 1.36M | v0 = v1; |
55 | 1.36M | } |
56 | 2.71k | if (SkScalarNearlyZero(quadArea, kCrossTolerance)) { |
57 | 203 | return 0; |
58 | 203 | } |
59 | | // 1 == ccw, -1 == cw |
60 | 2.51k | return (quadArea > 0) ? 1 : -1; |
61 | 2.51k | } |
62 | | |
63 | | // Compute difference vector to offset p0-p1 'offset' units in direction specified by 'side' |
64 | | bool compute_offset_vector(const SkPoint& p0, const SkPoint& p1, SkScalar offset, int side, |
65 | 13.7k | SkPoint* vector) { |
66 | 13.7k | SkASSERT(side == -1 || side == 1); |
67 | | // if distances are equal, can just outset by the perpendicular |
68 | 13.7k | SkVector perp = SkVector::Make(p0.fY - p1.fY, p1.fX - p0.fX); |
69 | 13.7k | if (!perp.setLength(offset*side)) { |
70 | 14 | return false; |
71 | 14 | } |
72 | 13.7k | *vector = perp; |
73 | 13.7k | return true; |
74 | 13.7k | } |
75 | | |
76 | | // check interval to see if intersection is in segment |
77 | 280M | static inline bool outside_interval(SkScalar numer, SkScalar denom, bool denomPositive) { |
78 | 280M | return (denomPositive && (numer < 0 || numer > denom)) || |
79 | 277M | (!denomPositive && (numer > 0 || numer < denom)); |
80 | 280M | } |
81 | | |
82 | | // special zero-length test when we're using vdotv as a denominator |
83 | 35.1M | static inline bool zero_length(const SkPoint& v, SkScalar vdotv) { |
84 | 35.1M | return !(SkScalarsAreFinite(v.fX, v.fY) && vdotv); |
85 | 35.1M | } |
86 | | |
87 | | // Compute the intersection 'p' between segments s0 and s1, if any. |
88 | | // 's' is the parametric value for the intersection along 's0' & 't' is the same for 's1'. |
89 | | // Returns false if there is no intersection. |
90 | | // If the length squared of a segment is 0, then we treat the segment as degenerate |
91 | | // and use only the first endpoint for tests. |
92 | | static bool compute_intersection(const OffsetSegment& s0, const OffsetSegment& s1, |
93 | 159M | SkPoint* p, SkScalar* s, SkScalar* t) { |
94 | 159M | const SkVector& v0 = s0.fV; |
95 | 159M | const SkVector& v1 = s1.fV; |
96 | 159M | SkVector w = s1.fP0 - s0.fP0; |
97 | 159M | SkScalar denom = v0.cross(v1); |
98 | 159M | bool denomPositive = (denom > 0); |
99 | 159M | SkScalar sNumer, tNumer; |
100 | 159M | if (SkScalarNearlyZero(denom, kCrossTolerance)) { |
101 | | // segments are parallel, but not collinear |
102 | 28.9M | if (!SkScalarNearlyZero(w.cross(v0), kCrossTolerance) || |
103 | 27.8M | !SkScalarNearlyZero(w.cross(v1), kCrossTolerance)) { |
104 | 5.66M | return false; |
105 | 5.66M | } |
106 | | |
107 | | // Check for zero-length segments |
108 | 23.2M | SkScalar v0dotv0 = v0.dot(v0); |
109 | 23.2M | if (zero_length(v0, v0dotv0)) { |
110 | | // Both are zero-length |
111 | 11.7M | SkScalar v1dotv1 = v1.dot(v1); |
112 | 11.7M | if (zero_length(v1, v1dotv1)) { |
113 | | // Check if they're the same point |
114 | 221k | if (!SkPointPriv::CanNormalize(w.fX, w.fY)) { |
115 | 918 | *p = s0.fP0; |
116 | 918 | *s = 0; |
117 | 918 | *t = 0; |
118 | 918 | return true; |
119 | 220k | } else { |
120 | | // Intersection is indeterminate |
121 | 220k | return false; |
122 | 220k | } |
123 | 11.5M | } |
124 | | // Otherwise project segment0's origin onto segment1 |
125 | 11.5M | tNumer = v1.dot(-w); |
126 | 11.5M | denom = v1dotv1; |
127 | 11.5M | if (outside_interval(tNumer, denom, true)) { |
128 | 91.0k | return false; |
129 | 91.0k | } |
130 | 11.4M | sNumer = 0; |
131 | 11.5M | } else { |
132 | | // Project segment1's endpoints onto segment0 |
133 | 11.5M | sNumer = v0.dot(w); |
134 | 11.5M | denom = v0dotv0; |
135 | 11.5M | tNumer = 0; |
136 | 11.5M | if (outside_interval(sNumer, denom, true)) { |
137 | | // The first endpoint doesn't lie on segment0 |
138 | | // If segment1 is degenerate, then there's no collision |
139 | 67.6k | SkScalar v1dotv1 = v1.dot(v1); |
140 | 67.6k | if (zero_length(v1, v1dotv1)) { |
141 | 4.58k | return false; |
142 | 4.58k | } |
143 | | |
144 | | // Otherwise try the other one |
145 | 63.1k | SkScalar oldSNumer = sNumer; |
146 | 63.1k | sNumer = v0.dot(w + v1); |
147 | 63.1k | tNumer = denom; |
148 | 63.1k | if (outside_interval(sNumer, denom, true)) { |
149 | | // it's possible that segment1's interval surrounds segment0 |
150 | | // this is false if params have the same signs, and in that case no collision |
151 | 62.6k | if (sNumer*oldSNumer > 0) { |
152 | 38.0k | return false; |
153 | 38.0k | } |
154 | | // otherwise project segment0's endpoint onto segment1 instead |
155 | 24.6k | sNumer = 0; |
156 | 24.6k | tNumer = v1.dot(-w); |
157 | 24.6k | denom = v1dotv1; |
158 | 24.6k | } |
159 | 63.1k | } |
160 | 11.5M | } |
161 | 130M | } else { |
162 | 130M | sNumer = w.cross(v1); |
163 | 130M | if (outside_interval(sNumer, denom, denomPositive)) { |
164 | 4.22M | return false; |
165 | 4.22M | } |
166 | 126M | tNumer = w.cross(v0); |
167 | 126M | if (outside_interval(tNumer, denom, denomPositive)) { |
168 | 821k | return false; |
169 | 821k | } |
170 | 148M | } |
171 | | |
172 | 148M | SkScalar localS = sNumer/denom; |
173 | 148M | SkScalar localT = tNumer/denom; |
174 | | |
175 | 148M | *p = s0.fP0 + v0*localS; |
176 | 148M | *s = localS; |
177 | 148M | *t = localT; |
178 | | |
179 | 148M | return true; |
180 | 148M | } |
181 | | |
182 | 2.24k | bool SkIsConvexPolygon(const SkPoint* polygonVerts, int polygonSize) { |
183 | 2.24k | if (polygonSize < 3) { |
184 | 110 | return false; |
185 | 110 | } |
186 | | |
187 | 2.13k | SkScalar lastArea = 0; |
188 | 2.13k | SkScalar lastPerpDot = 0; |
189 | | |
190 | 2.13k | int prevIndex = polygonSize - 1; |
191 | 2.13k | int currIndex = 0; |
192 | 2.13k | int nextIndex = 1; |
193 | 2.13k | SkPoint origin = polygonVerts[0]; |
194 | 2.13k | SkVector v0 = polygonVerts[currIndex] - polygonVerts[prevIndex]; |
195 | 2.13k | SkVector v1 = polygonVerts[nextIndex] - polygonVerts[currIndex]; |
196 | 2.13k | SkVector w0 = polygonVerts[currIndex] - origin; |
197 | 2.13k | SkVector w1 = polygonVerts[nextIndex] - origin; |
198 | 297k | for (int i = 0; i < polygonSize; ++i) { |
199 | 296k | if (!polygonVerts[i].isFinite()) { |
200 | 134 | return false; |
201 | 134 | } |
202 | | |
203 | | // Check that winding direction is always the same (otherwise we have a reflex vertex) |
204 | 296k | SkScalar perpDot = v0.cross(v1); |
205 | 296k | if (lastPerpDot*perpDot < 0) { |
206 | 737 | return false; |
207 | 737 | } |
208 | 295k | if (0 != perpDot) { |
209 | 56.3k | lastPerpDot = perpDot; |
210 | 56.3k | } |
211 | | |
212 | | // If the signed area ever flips it's concave |
213 | | // TODO: see if we can verify convexity only with signed area |
214 | 295k | SkScalar quadArea = w0.cross(w1); |
215 | 295k | if (quadArea*lastArea < 0) { |
216 | 420 | return false; |
217 | 420 | } |
218 | 295k | if (0 != quadArea) { |
219 | 122k | lastArea = quadArea; |
220 | 122k | } |
221 | | |
222 | 295k | prevIndex = currIndex; |
223 | 295k | currIndex = nextIndex; |
224 | 295k | nextIndex = (currIndex + 1) % polygonSize; |
225 | 295k | v0 = v1; |
226 | 295k | v1 = polygonVerts[nextIndex] - polygonVerts[currIndex]; |
227 | 295k | w0 = w1; |
228 | 295k | w1 = polygonVerts[nextIndex] - origin; |
229 | 295k | } |
230 | | |
231 | 839 | return true; |
232 | 2.13k | } |
233 | | |
234 | | struct OffsetEdge { |
235 | | OffsetEdge* fPrev; |
236 | | OffsetEdge* fNext; |
237 | | OffsetSegment fOffset; |
238 | | SkPoint fIntersection; |
239 | | SkScalar fTValue; |
240 | | uint16_t fIndex; |
241 | | uint16_t fEnd; |
242 | | |
243 | 41.2M | void init(uint16_t start = 0, uint16_t end = 0) { |
244 | 41.2M | fIntersection = fOffset.fP0; |
245 | 41.2M | fTValue = SK_ScalarMin; |
246 | 41.2M | fIndex = start; |
247 | 41.2M | fEnd = end; |
248 | 41.2M | } |
249 | | |
250 | | // special intersection check that looks for endpoint intersection |
251 | | bool checkIntersection(const OffsetEdge* that, |
252 | 190M | SkPoint* p, SkScalar* s, SkScalar* t) { |
253 | 190M | if (this->fEnd == that->fIndex) { |
254 | 180M | SkPoint p1 = this->fOffset.fP0 + this->fOffset.fV; |
255 | 180M | if (SkPointPriv::EqualsWithinTolerance(p1, that->fOffset.fP0)) { |
256 | 31.6M | *p = p1; |
257 | 31.6M | *s = SK_Scalar1; |
258 | 31.6M | *t = 0; |
259 | 31.6M | return true; |
260 | 31.6M | } |
261 | 159M | } |
262 | | |
263 | 159M | return compute_intersection(this->fOffset, that->fOffset, p, s, t); |
264 | 159M | } |
265 | | |
266 | | // computes the line intersection and then the "distance" from that to this |
267 | | // this is really a signed squared distance, where negative means that |
268 | | // the intersection lies inside this->fOffset |
269 | 22.1M | SkScalar computeCrossingDistance(const OffsetEdge* that) { |
270 | 22.1M | const OffsetSegment& s0 = this->fOffset; |
271 | 22.1M | const OffsetSegment& s1 = that->fOffset; |
272 | 22.1M | const SkVector& v0 = s0.fV; |
273 | 22.1M | const SkVector& v1 = s1.fV; |
274 | | |
275 | 22.1M | SkScalar denom = v0.cross(v1); |
276 | 22.1M | if (SkScalarNearlyZero(denom, kCrossTolerance)) { |
277 | | // segments are parallel |
278 | 12.6M | return SK_ScalarMax; |
279 | 12.6M | } |
280 | | |
281 | 9.44M | SkVector w = s1.fP0 - s0.fP0; |
282 | 9.44M | SkScalar localS = w.cross(v1) / denom; |
283 | 9.44M | if (localS < 0) { |
284 | 3.27M | localS = -localS; |
285 | 6.16M | } else { |
286 | 6.16M | localS -= SK_Scalar1; |
287 | 6.16M | } |
288 | | |
289 | 9.44M | localS *= SkScalarAbs(localS); |
290 | 9.44M | localS *= v0.dot(v0); |
291 | | |
292 | 9.44M | return localS; |
293 | 9.44M | } |
294 | | |
295 | | }; |
296 | | |
297 | 11.0M | static void remove_node(const OffsetEdge* node, OffsetEdge** head) { |
298 | | // remove from linked list |
299 | 11.0M | node->fPrev->fNext = node->fNext; |
300 | 11.0M | node->fNext->fPrev = node->fPrev; |
301 | 11.0M | if (node == *head) { |
302 | 3.87M | *head = (node->fNext == node) ? nullptr : node->fNext; |
303 | 3.87M | } |
304 | 11.0M | } |
305 | | |
306 | | ////////////////////////////////////////////////////////////////////////////////// |
307 | | |
308 | | // The objective here is to inset all of the edges by the given distance, and then |
309 | | // remove any invalid inset edges by detecting right-hand turns. In a ccw polygon, |
310 | | // we should only be making left-hand turns (for cw polygons, we use the winding |
311 | | // parameter to reverse this). We detect this by checking whether the second intersection |
312 | | // on an edge is closer to its tail than the first one. |
313 | | // |
314 | | // We might also have the case that there is no intersection between two neighboring inset edges. |
315 | | // In this case, one edge will lie to the right of the other and should be discarded along with |
316 | | // its previous intersection (if any). |
317 | | // |
318 | | // Note: the assumption is that inputPolygon is convex and has no coincident points. |
319 | | // |
320 | | bool SkInsetConvexPolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize, |
321 | 975 | SkScalar inset, SkTDArray<SkPoint>* insetPolygon) { |
322 | 975 | if (inputPolygonSize < 3) { |
323 | 14 | return false; |
324 | 14 | } |
325 | | |
326 | | // restrict this to match other routines |
327 | | // practically we don't want anything bigger than this anyway |
328 | 961 | if (inputPolygonSize > std::numeric_limits<uint16_t>::max()) { |
329 | 0 | return false; |
330 | 0 | } |
331 | | |
332 | | // can't inset by a negative or non-finite amount |
333 | 961 | if (inset < -SK_ScalarNearlyZero || !SkScalarIsFinite(inset)) { |
334 | 110 | return false; |
335 | 110 | } |
336 | | |
337 | | // insetting close to zero just returns the original poly |
338 | 851 | if (inset <= SK_ScalarNearlyZero) { |
339 | 129k | for (int i = 0; i < inputPolygonSize; ++i) { |
340 | 128k | *insetPolygon->push() = inputPolygonVerts[i]; |
341 | 128k | } |
342 | 576 | return true; |
343 | 576 | } |
344 | | |
345 | | // get winding direction |
346 | 275 | int winding = SkGetPolygonWinding(inputPolygonVerts, inputPolygonSize); |
347 | 275 | if (0 == winding) { |
348 | 2 | return false; |
349 | 2 | } |
350 | | |
351 | | // set up |
352 | 273 | SkAutoSTMalloc<64, OffsetEdge> edgeData(inputPolygonSize); |
353 | 273 | int prev = inputPolygonSize - 1; |
354 | 5.81k | for (int curr = 0; curr < inputPolygonSize; prev = curr, ++curr) { |
355 | 5.62k | int next = (curr + 1) % inputPolygonSize; |
356 | 5.62k | if (!inputPolygonVerts[curr].isFinite()) { |
357 | 6 | return false; |
358 | 6 | } |
359 | | // check for convexity just to be sure |
360 | 5.62k | if (compute_side(inputPolygonVerts[prev], inputPolygonVerts[curr] - inputPolygonVerts[prev], |
361 | 81 | inputPolygonVerts[next])*winding < 0) { |
362 | 81 | return false; |
363 | 81 | } |
364 | 5.54k | SkVector v = inputPolygonVerts[next] - inputPolygonVerts[curr]; |
365 | 5.54k | SkVector perp = SkVector::Make(-v.fY, v.fX); |
366 | 5.54k | perp.setLength(inset*winding); |
367 | 5.54k | edgeData[curr].fPrev = &edgeData[prev]; |
368 | 5.54k | edgeData[curr].fNext = &edgeData[next]; |
369 | 5.54k | edgeData[curr].fOffset.fP0 = inputPolygonVerts[curr] + perp; |
370 | 5.54k | edgeData[curr].fOffset.fV = v; |
371 | 5.54k | edgeData[curr].init(); |
372 | 5.54k | } |
373 | | |
374 | 186 | OffsetEdge* head = &edgeData[0]; |
375 | 186 | OffsetEdge* currEdge = head; |
376 | 186 | OffsetEdge* prevEdge = currEdge->fPrev; |
377 | 186 | int insetVertexCount = inputPolygonSize; |
378 | 186 | unsigned int iterations = 0; |
379 | 186 | unsigned int maxIterations = inputPolygonSize * inputPolygonSize; |
380 | 323k | while (head && prevEdge != currEdge) { |
381 | 323k | ++iterations; |
382 | | // we should check each edge against each other edge at most once |
383 | 323k | if (iterations > maxIterations) { |
384 | 51 | return false; |
385 | 51 | } |
386 | | |
387 | 323k | SkScalar s, t; |
388 | 323k | SkPoint intersection; |
389 | 323k | if (compute_intersection(prevEdge->fOffset, currEdge->fOffset, |
390 | 320k | &intersection, &s, &t)) { |
391 | | // if new intersection is further back on previous inset from the prior intersection |
392 | 320k | if (s < prevEdge->fTValue) { |
393 | | // no point in considering this one again |
394 | 211 | remove_node(prevEdge, &head); |
395 | 211 | --insetVertexCount; |
396 | | // go back one segment |
397 | 211 | prevEdge = prevEdge->fPrev; |
398 | | // we've already considered this intersection, we're done |
399 | 320k | } else if (currEdge->fTValue > SK_ScalarMin && |
400 | 14.4k | SkPointPriv::EqualsWithinTolerance(intersection, |
401 | 14.4k | currEdge->fIntersection, |
402 | 81 | 1.0e-6f)) { |
403 | 81 | break; |
404 | 320k | } else { |
405 | | // add intersection |
406 | 320k | currEdge->fIntersection = intersection; |
407 | 320k | currEdge->fTValue = t; |
408 | | |
409 | | // go to next segment |
410 | 320k | prevEdge = currEdge; |
411 | 320k | currEdge = currEdge->fNext; |
412 | 320k | } |
413 | 2.30k | } else { |
414 | | // if prev to right side of curr |
415 | 2.30k | int side = winding*compute_side(currEdge->fOffset.fP0, |
416 | 2.30k | currEdge->fOffset.fV, |
417 | 2.30k | prevEdge->fOffset.fP0); |
418 | 2.30k | if (side < 0 && |
419 | 1.21k | side == winding*compute_side(currEdge->fOffset.fP0, |
420 | 1.21k | currEdge->fOffset.fV, |
421 | 1.17k | prevEdge->fOffset.fP0 + prevEdge->fOffset.fV)) { |
422 | | // no point in considering this one again |
423 | 1.17k | remove_node(prevEdge, &head); |
424 | 1.17k | --insetVertexCount; |
425 | | // go back one segment |
426 | 1.17k | prevEdge = prevEdge->fPrev; |
427 | 1.13k | } else { |
428 | | // move to next segment |
429 | 1.13k | remove_node(currEdge, &head); |
430 | 1.13k | --insetVertexCount; |
431 | 1.13k | currEdge = currEdge->fNext; |
432 | 1.13k | } |
433 | 2.30k | } |
434 | 323k | } |
435 | | |
436 | | // store all the valid intersections that aren't nearly coincident |
437 | | // TODO: look at the main algorithm and see if we can detect these better |
438 | 135 | insetPolygon->reset(); |
439 | 135 | if (!head) { |
440 | 0 | return false; |
441 | 0 | } |
442 | | |
443 | 135 | static constexpr SkScalar kCleanupTolerance = 0.01f; |
444 | 135 | if (insetVertexCount >= 0) { |
445 | 135 | insetPolygon->setReserve(insetVertexCount); |
446 | 135 | } |
447 | 135 | int currIndex = 0; |
448 | 135 | *insetPolygon->push() = head->fIntersection; |
449 | 135 | currEdge = head->fNext; |
450 | 2.05k | while (currEdge != head) { |
451 | 1.91k | if (!SkPointPriv::EqualsWithinTolerance(currEdge->fIntersection, |
452 | 1.91k | (*insetPolygon)[currIndex], |
453 | 1.06k | kCleanupTolerance)) { |
454 | 1.06k | *insetPolygon->push() = currEdge->fIntersection; |
455 | 1.06k | currIndex++; |
456 | 1.06k | } |
457 | 1.91k | currEdge = currEdge->fNext; |
458 | 1.91k | } |
459 | | // make sure the first and last points aren't coincident |
460 | 135 | if (currIndex >= 1 && |
461 | 51 | SkPointPriv::EqualsWithinTolerance((*insetPolygon)[0], (*insetPolygon)[currIndex], |
462 | 4 | kCleanupTolerance)) { |
463 | 4 | insetPolygon->pop(); |
464 | 4 | } |
465 | | |
466 | 135 | return SkIsConvexPolygon(insetPolygon->begin(), insetPolygon->count()); |
467 | 135 | } |
468 | | |
469 | | /////////////////////////////////////////////////////////////////////////////////////////// |
470 | | |
471 | | // compute the number of points needed for a circular join when offsetting a reflex vertex |
472 | | bool SkComputeRadialSteps(const SkVector& v1, const SkVector& v2, SkScalar offset, |
473 | 16.8k | SkScalar* rotSin, SkScalar* rotCos, int* n) { |
474 | 16.8k | const SkScalar kRecipPixelsPerArcSegment = 0.25f; |
475 | | |
476 | 16.8k | SkScalar rCos = v1.dot(v2); |
477 | 16.8k | if (!SkScalarIsFinite(rCos)) { |
478 | 2 | return false; |
479 | 2 | } |
480 | 16.8k | SkScalar rSin = v1.cross(v2); |
481 | 16.8k | if (!SkScalarIsFinite(rSin)) { |
482 | 1 | return false; |
483 | 1 | } |
484 | 16.8k | SkScalar theta = SkScalarATan2(rSin, rCos); |
485 | | |
486 | 16.8k | SkScalar floatSteps = SkScalarAbs(offset*theta*kRecipPixelsPerArcSegment); |
487 | | // limit the number of steps to at most max uint16_t (that's all we can index) |
488 | | // knock one value off the top to account for rounding |
489 | 16.8k | if (floatSteps >= std::numeric_limits<uint16_t>::max()) { |
490 | 1 | return false; |
491 | 1 | } |
492 | 16.8k | int steps = SkScalarRoundToInt(floatSteps); |
493 | | |
494 | 14.4k | SkScalar dTheta = steps > 0 ? theta / steps : 0; |
495 | 16.8k | *rotSin = SkScalarSin(dTheta); |
496 | 16.8k | *rotCos = SkScalarCos(dTheta); |
497 | | // Our offset may be so large that we end up with a tiny dTheta, in which case we |
498 | | // lose precision when computing rotSin and rotCos. |
499 | 16.8k | if (steps > 0 && (*rotSin == 0 || *rotCos == 1)) { |
500 | 5 | return false; |
501 | 5 | } |
502 | 16.8k | *n = steps; |
503 | 16.8k | return true; |
504 | 16.8k | } |
505 | | |
506 | | /////////////////////////////////////////////////////////////////////////////////////////// |
507 | | |
508 | | // a point is "left" to another if its x-coord is less, or if equal, its y-coord is greater |
509 | 1.89M | static bool left(const SkPoint& p0, const SkPoint& p1) { |
510 | 1.89M | return p0.fX < p1.fX || (!(p0.fX > p1.fX) && p0.fY > p1.fY); |
511 | 1.89M | } |
512 | | |
513 | | // a point is "right" to another if its x-coord is greater, or if equal, its y-coord is less |
514 | 0 | static bool right(const SkPoint& p0, const SkPoint& p1) { |
515 | 0 | return p0.fX > p1.fX || (!(p0.fX < p1.fX) && p0.fY < p1.fY); |
516 | 0 | } |
517 | | |
518 | | struct Vertex { |
519 | 1.55M | static bool Left(const Vertex& qv0, const Vertex& qv1) { |
520 | 1.55M | return left(qv0.fPosition, qv1.fPosition); |
521 | 1.55M | } |
522 | | |
523 | | // packed to fit into 16 bytes (one cache line) |
524 | | SkPoint fPosition; |
525 | | uint16_t fIndex; // index in unsorted polygon |
526 | | uint16_t fPrevIndex; // indices for previous and next vertex in unsorted polygon |
527 | | uint16_t fNextIndex; |
528 | | uint16_t fFlags; |
529 | | }; |
530 | | |
// Bit flags recording whether a vertex's neighbors lie to its "left"
// (in the sense of the left() ordering predicate above).
enum VertexFlags {
    kPrevLeft_VertexFlag = 0x1,
    kNextLeft_VertexFlag = 0x2,
};
535 | | |
536 | | struct ActiveEdge { |
537 | 604 | ActiveEdge() : fChild{ nullptr, nullptr }, fAbove(nullptr), fBelow(nullptr), fRed(false) {} |
538 | | ActiveEdge(const SkPoint& p0, const SkVector& v, uint16_t index0, uint16_t index1) |
539 | | : fSegment({ p0, v }) |
540 | | , fIndex0(index0) |
541 | | , fIndex1(index1) |
542 | | , fAbove(nullptr) |
543 | | , fBelow(nullptr) |
544 | 6.89k | , fRed(true) { |
545 | 6.89k | fChild[0] = nullptr; |
546 | 6.89k | fChild[1] = nullptr; |
547 | 6.89k | } |
548 | | |
549 | | // Returns true if "this" is above "that", assuming this->p0 is to the left of that->p0 |
550 | | // This is only used to verify the edgelist -- the actual test for insertion/deletion is much |
551 | | // simpler because we can make certain assumptions then. |
552 | 0 | bool aboveIfLeft(const ActiveEdge* that) const { |
553 | 0 | const SkPoint& p0 = this->fSegment.fP0; |
554 | 0 | const SkPoint& q0 = that->fSegment.fP0; |
555 | 0 | SkASSERT(p0.fX <= q0.fX); |
556 | 0 | SkVector d = q0 - p0; |
557 | 0 | const SkVector& v = this->fSegment.fV; |
558 | 0 | const SkVector& w = that->fSegment.fV; |
559 | | // The idea here is that if the vector between the origins of the two segments (d) |
560 | | // rotates counterclockwise up to the vector representing the "this" segment (v), |
561 | | // then we know that "this" is above "that". If the result is clockwise we say it's below. |
562 | 0 | if (this->fIndex0 != that->fIndex0) { |
563 | 0 | SkScalar cross = d.cross(v); |
564 | 0 | if (cross > kCrossTolerance) { |
565 | 0 | return true; |
566 | 0 | } else if (cross < -kCrossTolerance) { |
567 | 0 | return false; |
568 | 0 | } |
569 | 0 | } else if (this->fIndex1 == that->fIndex1) { |
570 | 0 | return false; |
571 | 0 | } |
572 | | // At this point either the two origins are nearly equal or the origin of "that" |
573 | | // lies on dv. So then we try the same for the vector from the tail of "this" |
574 | | // to the head of "that". Again, ccw means "this" is above "that". |
575 | | // d = that.P1 - this.P0 |
576 | | // = that.fP0 + that.fV - this.fP0 |
577 | | // = that.fP0 - this.fP0 + that.fV |
578 | | // = old_d + that.fV |
579 | 0 | d += w; |
580 | 0 | SkScalar cross = d.cross(v); |
581 | 0 | if (cross > kCrossTolerance) { |
582 | 0 | return true; |
583 | 0 | } else if (cross < -kCrossTolerance) { |
584 | 0 | return false; |
585 | 0 | } |
586 | | // If the previous check fails, the two segments are nearly collinear |
587 | | // First check y-coord of first endpoints |
588 | 0 | if (p0.fX < q0.fX) { |
589 | 0 | return (p0.fY >= q0.fY); |
590 | 0 | } else if (p0.fY > q0.fY) { |
591 | 0 | return true; |
592 | 0 | } else if (p0.fY < q0.fY) { |
593 | 0 | return false; |
594 | 0 | } |
595 | | // The first endpoints are the same, so check the other endpoint |
596 | 0 | SkPoint p1 = p0 + v; |
597 | 0 | SkPoint q1 = q0 + w; |
598 | 0 | if (p1.fX < q1.fX) { |
599 | 0 | return (p1.fY >= q1.fY); |
600 | 0 | } else { |
601 | 0 | return (p1.fY > q1.fY); |
602 | 0 | } |
603 | 0 | } Unexecuted instantiation: ActiveEdge::aboveIfLeft(ActiveEdge const*) const Unexecuted instantiation: ActiveEdge::aboveIfLeft(ActiveEdge const*) const |
604 | | |
605 | | // same as leftAndAbove(), but generalized |
606 | 0 | bool above(const ActiveEdge* that) const { |
607 | 0 | const SkPoint& p0 = this->fSegment.fP0; |
608 | 0 | const SkPoint& q0 = that->fSegment.fP0; |
609 | 0 | if (right(p0, q0)) { |
610 | 0 | return !that->aboveIfLeft(this); |
611 | 0 | } else { |
612 | 0 | return this->aboveIfLeft(that); |
613 | 0 | } |
614 | 0 | } |
615 | | |
616 | 180k | bool intersect(const SkPoint& q0, const SkVector& w, uint16_t index0, uint16_t index1) const { |
617 | | // check first to see if these edges are neighbors in the polygon |
618 | 180k | if (this->fIndex0 == index0 || this->fIndex1 == index0 || |
619 | 174k | this->fIndex0 == index1 || this->fIndex1 == index1) { |
620 | 11.5k | return false; |
621 | 11.5k | } |
622 | | |
623 | | // We don't need the exact intersection point so we can do a simpler test here. |
624 | 169k | const SkPoint& p0 = this->fSegment.fP0; |
625 | 169k | const SkVector& v = this->fSegment.fV; |
626 | 169k | SkPoint p1 = p0 + v; |
627 | 169k | SkPoint q1 = q0 + w; |
628 | | |
629 | | // We assume some x-overlap due to how the edgelist works |
630 | | // This allows us to simplify our test |
631 | | // We need some slop here because storing the vector and recomputing the second endpoint |
632 | | // doesn't necessary give us the original result in floating point. |
633 | | // TODO: Store vector as double? Store endpoint as well? |
634 | 169k | SkASSERT(q0.fX <= p1.fX + SK_ScalarNearlyZero); |
635 | | |
636 | | // if each segment straddles the other (i.e., the endpoints have different sides) |
637 | | // then they intersect |
638 | 169k | bool result; |
639 | 169k | if (p0.fX < q0.fX) { |
640 | 102k | if (q1.fX < p1.fX) { |
641 | 46.2k | result = (compute_side(p0, v, q0)*compute_side(p0, v, q1) < 0); |
642 | 56.0k | } else { |
643 | 56.0k | result = (compute_side(p0, v, q0)*compute_side(q0, w, p1) > 0); |
644 | 56.0k | } |
645 | 67.0k | } else { |
646 | 67.0k | if (p1.fX < q1.fX) { |
647 | 330 | result = (compute_side(q0, w, p0)*compute_side(q0, w, p1) < 0); |
648 | 66.7k | } else { |
649 | 66.7k | result = (compute_side(q0, w, p0)*compute_side(p0, v, q1) > 0); |
650 | 66.7k | } |
651 | 67.0k | } |
652 | 169k | return result; |
653 | 169k | } |
654 | | |
655 | 89.7k | bool intersect(const ActiveEdge* edge) { |
656 | 89.7k | return this->intersect(edge->fSegment.fP0, edge->fSegment.fV, edge->fIndex0, edge->fIndex1); |
657 | 89.7k | } |
658 | | |
659 | 0 | bool lessThan(const ActiveEdge* that) const { |
660 | 0 | SkASSERT(!this->above(this)); |
661 | 0 | SkASSERT(!that->above(that)); |
662 | 0 | SkASSERT(!(this->above(that) && that->above(this))); |
663 | 0 | return this->above(that); |
664 | 0 | } Unexecuted instantiation: ActiveEdge::lessThan(ActiveEdge const*) const Unexecuted instantiation: ActiveEdge::lessThan(ActiveEdge const*) const |
665 | | |
666 | 137k | bool equals(uint16_t index0, uint16_t index1) const { |
667 | 137k | return (this->fIndex0 == index0 && this->fIndex1 == index1); |
668 | 137k | } |
669 | | |
670 | | OffsetSegment fSegment; |
671 | | uint16_t fIndex0; // indices for previous and next vertex in polygon |
672 | | uint16_t fIndex1; |
673 | | ActiveEdge* fChild[2]; |
674 | | ActiveEdge* fAbove; |
675 | | ActiveEdge* fBelow; |
676 | | int32_t fRed; |
677 | | }; |
678 | | |
679 | | class ActiveEdgeList { |
680 | | public: |
681 | 604 | ActiveEdgeList(int maxEdges) { |
682 | 604 | fAllocation = (char*) sk_malloc_throw(sizeof(ActiveEdge)*maxEdges); |
683 | 604 | fCurrFree = 0; |
684 | 604 | fMaxFree = maxEdges; |
685 | 604 | } |
686 | 604 | ~ActiveEdgeList() { |
687 | 604 | fTreeHead.fChild[1] = nullptr; |
688 | 604 | sk_free(fAllocation); |
689 | 604 | } |
690 | | |
691 | 7.27k | bool insert(const SkPoint& p0, const SkPoint& p1, uint16_t index0, uint16_t index1) { |
692 | 7.27k | SkVector v = p1 - p0; |
693 | 7.27k | if (!v.isFinite()) { |
694 | 23 | return false; |
695 | 23 | } |
696 | | // empty tree case -- easy |
697 | 7.25k | if (!fTreeHead.fChild[1]) { |
698 | 594 | ActiveEdge* root = fTreeHead.fChild[1] = this->allocate(p0, v, index0, index1); |
699 | 594 | SkASSERT(root); |
700 | 594 | if (!root) { |
701 | 0 | return false; |
702 | 0 | } |
703 | 594 | root->fRed = false; |
704 | 594 | return true; |
705 | 594 | } |
706 | | |
707 | | // set up helpers |
708 | 6.66k | ActiveEdge* top = &fTreeHead; |
709 | 6.66k | ActiveEdge *grandparent = nullptr; |
710 | 6.66k | ActiveEdge *parent = nullptr; |
711 | 6.66k | ActiveEdge *curr = top->fChild[1]; |
712 | 6.66k | int dir = 0; |
713 | 6.66k | int last = 0; // ? |
714 | | // predecessor and successor, for intersection check |
715 | 6.66k | ActiveEdge* pred = nullptr; |
716 | 6.66k | ActiveEdge* succ = nullptr; |
717 | | |
718 | | // search down the tree |
719 | 21.3k | while (true) { |
720 | 21.3k | if (!curr) { |
721 | | // check for intersection with predecessor and successor |
722 | 6.44k | if ((pred && pred->intersect(p0, v, index0, index1)) || |
723 | 6.35k | (succ && succ->intersect(p0, v, index0, index1))) { |
724 | 141 | return false; |
725 | 141 | } |
726 | | // insert new node at bottom |
727 | 6.29k | parent->fChild[dir] = curr = this->allocate(p0, v, index0, index1); |
728 | 6.29k | SkASSERT(curr); |
729 | 6.29k | if (!curr) { |
730 | 0 | return false; |
731 | 0 | } |
732 | 6.29k | curr->fAbove = pred; |
733 | 6.29k | curr->fBelow = succ; |
734 | 6.29k | if (pred) { |
735 | 4.70k | pred->fBelow = curr; |
736 | 4.70k | } |
737 | 6.29k | if (succ) { |
738 | 5.76k | succ->fAbove = curr; |
739 | 5.76k | } |
740 | 6.29k | if (IsRed(parent)) { |
741 | 1.63k | int dir2 = (top->fChild[1] == grandparent); |
742 | 1.63k | if (curr == parent->fChild[last]) { |
743 | 801 | top->fChild[dir2] = SingleRotation(grandparent, !last); |
744 | 832 | } else { |
745 | 832 | top->fChild[dir2] = DoubleRotation(grandparent, !last); |
746 | 832 | } |
747 | 1.63k | } |
748 | 6.29k | break; |
749 | 14.9k | } else if (IsRed(curr->fChild[0]) && IsRed(curr->fChild[1])) { |
750 | | // color flip |
751 | 2.31k | curr->fRed = true; |
752 | 2.31k | curr->fChild[0]->fRed = false; |
753 | 2.31k | curr->fChild[1]->fRed = false; |
754 | 2.31k | if (IsRed(parent)) { |
755 | 338 | int dir2 = (top->fChild[1] == grandparent); |
756 | 338 | if (curr == parent->fChild[last]) { |
757 | 312 | top->fChild[dir2] = SingleRotation(grandparent, !last); |
758 | 26 | } else { |
759 | 26 | top->fChild[dir2] = DoubleRotation(grandparent, !last); |
760 | 26 | } |
761 | 338 | } |
762 | 2.31k | } |
763 | | |
764 | 14.9k | last = dir; |
765 | 14.9k | int side; |
766 | | // check to see if segment is above or below |
767 | 14.9k | if (curr->fIndex0 == index0) { |
768 | 3.50k | side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p1); |
769 | 11.4k | } else { |
770 | 11.4k | side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p0); |
771 | 11.4k | } |
772 | 14.9k | if (0 == side) { |
773 | 221 | return false; |
774 | 221 | } |
775 | 14.7k | dir = (side < 0); |
776 | | |
777 | 14.7k | if (0 == dir) { |
778 | 7.87k | succ = curr; |
779 | 6.84k | } else { |
780 | 6.84k | pred = curr; |
781 | 6.84k | } |
782 | | |
783 | | // update helpers |
784 | 14.7k | if (grandparent) { |
785 | 2.77k | top = grandparent; |
786 | 2.77k | } |
787 | 14.7k | grandparent = parent; |
788 | 14.7k | parent = curr; |
789 | 14.7k | curr = curr->fChild[dir]; |
790 | 14.7k | } |
791 | | |
792 | | // update root and make it black |
793 | 6.29k | fTreeHead.fChild[1]->fRed = false; |
794 | | |
795 | 6.29k | SkDEBUGCODE(VerifyTree(fTreeHead.fChild[1])); |
796 | | |
797 | 6.29k | return true; |
798 | 6.66k | } |
799 | | |
    // Replaces the active edge p0p1 (identified by index0/index1) with the new
    // edge p1p2 (index1/index2), as happens at a vertex where one neighbor lies
    // to the left of the sweep line and the other to the right. Returns false if
    // the edge can't be found, if a vertex lies exactly on another active edge,
    // or if the outgoing/incoming edge intersects a neighboring active edge --
    // any of which means the polygon is not simple.
    bool replace(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2,
                 uint16_t index0, uint16_t index1, uint16_t index2) {
        if (!fTreeHead.fChild[1]) {
            // empty tree -- nothing to replace
            return false;
        }

        SkVector v = p2 - p1;
        ActiveEdge* curr = &fTreeHead;
        ActiveEdge* found = nullptr;
        int dir = 1;

        // search down the tree for the edge with the matching index pair
        while (curr->fChild[dir] != nullptr) {
            // update helpers
            curr = curr->fChild[dir];
            // save found node
            if (curr->equals(index0, index1)) {
                found = curr;
                break;
            } else {
                // check to see if segment is above or below;
                // if the edges share endpoint index1, compare using the other vertex
                int side;
                if (curr->fIndex1 == index1) {
                    side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p0);
                } else {
                    side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p1);
                }
                if (0 == side) {
                    // vertex is collinear with an active edge -> treat as non-simple
                    return false;
                }
                dir = (side < 0);
            }
        }

        if (!found) {
            return false;
        }

        // replace if found
        ActiveEdge* pred = found->fAbove;
        ActiveEdge* succ = found->fBelow;
        // check deletion and insert intersection cases: both the departing edge
        // and the incoming edge must be clear of the neighboring active edges
        if (pred && (pred->intersect(found) || pred->intersect(p1, v, index1, index2))) {
            return false;
        }
        if (succ && (succ->intersect(found) || succ->intersect(p1, v, index1, index2))) {
            return false;
        }
        // overwrite the node's payload in place; its tree position remains valid
        found->fSegment.fP0 = p1;
        found->fSegment.fV = v;
        found->fIndex0 = index1;
        found->fIndex1 = index2;
        // above and below should stay the same

        SkDEBUGCODE(VerifyTree(fTreeHead.fChild[1]));

        return true;
    }
859 | | |
    // Removes the active edge p0p1 (identified by index0/index1) from the sweep
    // line, using a top-down red-black deletion: a red node is pushed down ahead
    // of the search so the final unlink needs no bottom-up fixup. Returns false
    // if a vertex lies exactly on another active edge or if the removed edge's
    // former neighbors intersect each other -- i.e., the polygon is not simple.
    bool remove(const SkPoint& p0, const SkPoint& p1, uint16_t index0, uint16_t index1) {
        if (!fTreeHead.fChild[1]) {
            return false;
        }

        ActiveEdge* curr = &fTreeHead;
        ActiveEdge* parent = nullptr;
        ActiveEdge* grandparent = nullptr;
        ActiveEdge* found = nullptr;
        int dir = 1;

        // search and push a red node down
        while (curr->fChild[dir] != nullptr) {
            int last = dir;

            // update helpers
            grandparent = parent;
            parent = curr;
            curr = curr->fChild[dir];
            // save found node
            if (curr->equals(index0, index1)) {
                found = curr;
                // keep descending; the bottom node we end up at will have its
                // payload swapped into 'found' and be the one physically unlinked
                dir = 0;
            } else {
                // check to see if segment is above or below;
                // if the edges share endpoint index1, compare using the other vertex
                int side;
                if (curr->fIndex1 == index1) {
                    side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p0);
                } else {
                    side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p1);
                }
                if (0 == side) {
                    // vertex is collinear with an active edge -> treat as non-simple
                    return false;
                }
                dir = (side < 0);
            }

            // push the red node down
            if (!IsRed(curr) && !IsRed(curr->fChild[dir])) {
                if (IsRed(curr->fChild[!dir])) {
                    parent = parent->fChild[last] = SingleRotation(curr, dir);
                } else {
                    // curr and both its children are black; borrow from the sibling
                    ActiveEdge *s = parent->fChild[!last];

                    if (s != nullptr) {
                        if (!IsRed(s->fChild[!last]) && !IsRed(s->fChild[last])) {
                            // color flip
                            parent->fRed = false;
                            s->fRed = true;
                            curr->fRed = true;
                        } else {
                            int dir2 = (grandparent->fChild[1] == parent);

                            if (IsRed(s->fChild[last])) {
                                grandparent->fChild[dir2] = DoubleRotation(parent, last);
                            } else if (IsRed(s->fChild[!last])) {
                                grandparent->fChild[dir2] = SingleRotation(parent, last);
                            }

                            // ensure correct coloring
                            curr->fRed = grandparent->fChild[dir2]->fRed = true;
                            grandparent->fChild[dir2]->fChild[0]->fRed = false;
                            grandparent->fChild[dir2]->fChild[1]->fRed = false;
                        }
                    }
                }
            }
        }

        // replace and remove if found
        if (found) {
            ActiveEdge* pred = found->fAbove;
            ActiveEdge* succ = found->fBelow;
            // the edges adjacent to the removed one become neighbors;
            // if they cross, the polygon is not simple
            if ((pred && pred->intersect(found)) || (succ && succ->intersect(found))) {
                return false;
            }
            if (found != curr) {
                // move the bottom node's payload into 'found';
                // 'curr' is the node actually spliced out below
                found->fSegment = curr->fSegment;
                found->fIndex0 = curr->fIndex0;
                found->fIndex1 = curr->fIndex1;
                found->fAbove = curr->fAbove;
                pred = found->fAbove;
                // we don't need to set found->fBelow here
            } else {
                if (succ) {
                    succ->fAbove = pred;
                }
            }
            if (pred) {
                pred->fBelow = curr->fBelow;
            }
            // unlink 'curr', replacing it with its left child if present,
            // otherwise its right child
            parent->fChild[parent->fChild[1] == curr] = curr->fChild[!curr->fChild[0]];

            // no need to delete (nodes live in a pool);
            // poison the list links to catch stale use
            curr->fAbove = reinterpret_cast<ActiveEdge*>(0xdeadbeefll);
            curr->fBelow = reinterpret_cast<ActiveEdge*>(0xdeadbeefll);
            if (fTreeHead.fChild[1]) {
                fTreeHead.fChild[1]->fRed = false;
            }
        }

        // update root and make it black
        if (fTreeHead.fChild[1]) {
            fTreeHead.fChild[1]->fRed = false;
        }

        SkDEBUGCODE(VerifyTree(fTreeHead.fChild[1]));

        return true;
    }
970 | | |
971 | | private: |
972 | | // allocator |
973 | 6.89k | ActiveEdge * allocate(const SkPoint& p0, const SkPoint& p1, uint16_t index0, uint16_t index1) { |
974 | 6.89k | if (fCurrFree >= fMaxFree) { |
975 | 0 | return nullptr; |
976 | 0 | } |
977 | 6.89k | char* bytes = fAllocation + sizeof(ActiveEdge)*fCurrFree; |
978 | 6.89k | ++fCurrFree; |
979 | 6.89k | return new(bytes) ActiveEdge(p0, p1, index0, index1); |
980 | 6.89k | } |
981 | | |
982 | | /////////////////////////////////////////////////////////////////////////////////// |
983 | | // Red-black tree methods |
984 | | /////////////////////////////////////////////////////////////////////////////////// |
985 | 66.9k | static bool IsRed(const ActiveEdge* node) { |
986 | 66.9k | return node && node->fRed; |
987 | 66.9k | } |
988 | | |
989 | 4.62k | static ActiveEdge* SingleRotation(ActiveEdge* node, int dir) { |
990 | 4.62k | ActiveEdge* tmp = node->fChild[!dir]; |
991 | | |
992 | 4.62k | node->fChild[!dir] = tmp->fChild[dir]; |
993 | 4.62k | tmp->fChild[dir] = node; |
994 | | |
995 | 4.62k | node->fRed = true; |
996 | 4.62k | tmp->fRed = false; |
997 | | |
998 | 4.62k | return tmp; |
999 | 4.62k | } |
1000 | | |
1001 | 1.28k | static ActiveEdge* DoubleRotation(ActiveEdge* node, int dir) { |
1002 | 1.28k | node->fChild[!dir] = SingleRotation(node->fChild[!dir], !dir); |
1003 | | |
1004 | 1.28k | return SingleRotation(node, dir); |
1005 | 1.28k | } |
1006 | | |
1007 | | // returns black link count |
1008 | 0 | static int VerifyTree(const ActiveEdge* tree) { |
1009 | 0 | if (!tree) { |
1010 | 0 | return 1; |
1011 | 0 | } |
1012 | | |
1013 | 0 | const ActiveEdge* left = tree->fChild[0]; |
1014 | 0 | const ActiveEdge* right = tree->fChild[1]; |
1015 | | |
1016 | | // no consecutive red links |
1017 | 0 | if (IsRed(tree) && (IsRed(left) || IsRed(right))) { |
1018 | 0 | SkASSERT(false); |
1019 | 0 | return 0; |
1020 | 0 | } |
1021 | | |
1022 | | // check secondary links |
1023 | 0 | if (tree->fAbove) { |
1024 | 0 | SkASSERT(tree->fAbove->fBelow == tree); |
1025 | 0 | SkASSERT(tree->fAbove->lessThan(tree)); |
1026 | 0 | } |
1027 | 0 | if (tree->fBelow) { |
1028 | 0 | SkASSERT(tree->fBelow->fAbove == tree); |
1029 | 0 | SkASSERT(tree->lessThan(tree->fBelow)); |
1030 | 0 | } |
1031 | | |
1032 | | // violates binary tree order |
1033 | 0 | if ((left && tree->lessThan(left)) || (right && right->lessThan(tree))) { |
1034 | 0 | SkASSERT(false); |
1035 | 0 | return 0; |
1036 | 0 | } |
1037 | | |
1038 | 0 | int leftCount = VerifyTree(left); |
1039 | 0 | int rightCount = VerifyTree(right); |
1040 | | |
1041 | | // return black link count |
1042 | 0 | if (leftCount != 0 && rightCount != 0) { |
1043 | | // black height mismatch |
1044 | 0 | if (leftCount != rightCount) { |
1045 | 0 | SkASSERT(false); |
1046 | 0 | return 0; |
1047 | 0 | } |
1048 | 0 | return IsRed(tree) ? leftCount : leftCount + 1; |
1049 | 0 | } else { |
1050 | 0 | return 0; |
1051 | 0 | } |
1052 | 0 | } Unexecuted instantiation: ActiveEdgeList::VerifyTree(ActiveEdge const*) Unexecuted instantiation: ActiveEdgeList::VerifyTree(ActiveEdge const*) |
1053 | | |
    ActiveEdge fTreeHead;  // sentinel head node; the actual root is fTreeHead.fChild[1]
    char* fAllocation;     // raw pool that allocate() carves ActiveEdge nodes from
    int fCurrFree;         // index of the next unused slot in fAllocation
    int fMaxFree;          // total number of slots in fAllocation
};
1059 | | |
// Here we implement a sweep line algorithm to determine whether the provided points
// represent a simple polygon, i.e., the polygon is non-self-intersecting.
// We first insert the vertices into a priority queue sorting horizontally from left to right.
// Then as we pop the vertices from the queue we generate events which indicate that an edge
// should be added or removed from an edge list. If any intersections are detected in the edge
// list, then we know the polygon is self-intersecting and hence not simple.
bool SkIsSimplePolygon(const SkPoint* polygon, int polygonSize) {
    if (polygonSize < 3) {
        return false;
    }

    // If it's convex, it's simple
    if (SkIsConvexPolygon(polygon, polygonSize)) {
        return true;
    }

    // practically speaking, it takes too long to process large polygons
    if (polygonSize > 2048) {
        return false;
    }

    // Queue the vertices left-to-right; each vertex records which of its two
    // neighbors lie to its left (i.e., which incident edges end vs. start here).
    SkTDPQueue <Vertex, Vertex::Left> vertexQueue(polygonSize);
    for (int i = 0; i < polygonSize; ++i) {
        Vertex newVertex;
        if (!polygon[i].isFinite()) {
            // non-finite coordinates can't be processed reliably
            return false;
        }
        newVertex.fPosition = polygon[i];
        newVertex.fIndex = i;
        newVertex.fPrevIndex = (i - 1 + polygonSize) % polygonSize;
        newVertex.fNextIndex = (i + 1) % polygonSize;
        newVertex.fFlags = 0;
        if (left(polygon[newVertex.fPrevIndex], polygon[i])) {
            newVertex.fFlags |= kPrevLeft_VertexFlag;
        }
        if (left(polygon[newVertex.fNextIndex], polygon[i])) {
            newVertex.fFlags |= kNextLeft_VertexFlag;
        }
        vertexQueue.insert(newVertex);
    }

    // pop each vertex from the queue and generate events depending on
    // where it lies relative to its neighboring edges
    ActiveEdgeList sweepLine(polygonSize);
    while (vertexQueue.count() > 0) {
        const Vertex& v = vertexQueue.peek();

        // both to the right -- insert both
        if (v.fFlags == 0) {
            if (!sweepLine.insert(v.fPosition, polygon[v.fPrevIndex], v.fIndex, v.fPrevIndex)) {
                break;  // intersection or degeneracy detected -- not simple
            }
            if (!sweepLine.insert(v.fPosition, polygon[v.fNextIndex], v.fIndex, v.fNextIndex)) {
                break;
            }
        // both to the left -- remove both
        } else if (v.fFlags == (kPrevLeft_VertexFlag | kNextLeft_VertexFlag)) {
            if (!sweepLine.remove(polygon[v.fPrevIndex], v.fPosition, v.fPrevIndex, v.fIndex)) {
                break;
            }
            if (!sweepLine.remove(polygon[v.fNextIndex], v.fPosition, v.fNextIndex, v.fIndex)) {
                break;
            }
        // one to left and right -- replace one with another
        } else {
            if (v.fFlags & kPrevLeft_VertexFlag) {
                if (!sweepLine.replace(polygon[v.fPrevIndex], v.fPosition, polygon[v.fNextIndex],
                                       v.fPrevIndex, v.fIndex, v.fNextIndex)) {
                    break;
                }
            } else {
                SkASSERT(v.fFlags & kNextLeft_VertexFlag);
                if (!sweepLine.replace(polygon[v.fNextIndex], v.fPosition, polygon[v.fPrevIndex],
                                       v.fNextIndex, v.fIndex, v.fPrevIndex)) {
                    break;
                }
            }
        }

        vertexQueue.pop();
    }

    // simple iff every vertex was consumed without an event failing
    return (vertexQueue.count() == 0);
}
1144 | | |
1145 | | /////////////////////////////////////////////////////////////////////////////////////////// |
1146 | | |
1147 | | // helper function for SkOffsetSimplePolygon |
1148 | | static void setup_offset_edge(OffsetEdge* currEdge, |
1149 | | const SkPoint& endpoint0, const SkPoint& endpoint1, |
1150 | 41.2M | uint16_t startIndex, uint16_t endIndex) { |
1151 | 41.2M | currEdge->fOffset.fP0 = endpoint0; |
1152 | 41.2M | currEdge->fOffset.fV = endpoint1 - endpoint0; |
1153 | 41.2M | currEdge->init(startIndex, endIndex); |
1154 | 41.2M | } |
1155 | | |
1156 | | static bool is_reflex_vertex(const SkPoint* inputPolygonVerts, int winding, SkScalar offset, |
1157 | 27.2k | uint16_t prevIndex, uint16_t currIndex, uint16_t nextIndex) { |
1158 | 27.2k | int side = compute_side(inputPolygonVerts[prevIndex], |
1159 | 27.2k | inputPolygonVerts[currIndex] - inputPolygonVerts[prevIndex], |
1160 | 27.2k | inputPolygonVerts[nextIndex]); |
1161 | | // if reflex point, we need to add extra edges |
1162 | 27.2k | return (side*winding*offset < 0); |
1163 | 27.2k | } |
1164 | | |
1165 | | bool SkOffsetSimplePolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize, |
1166 | | const SkRect& bounds, SkScalar offset, |
1167 | 975 | SkTDArray<SkPoint>* offsetPolygon, SkTDArray<int>* polygonIndices) { |
1168 | 975 | if (inputPolygonSize < 3) { |
1169 | 14 | return false; |
1170 | 14 | } |
1171 | | |
1172 | | // need to be able to represent all the vertices in the 16-bit indices |
1173 | 961 | if (inputPolygonSize >= std::numeric_limits<uint16_t>::max()) { |
1174 | 0 | return false; |
1175 | 0 | } |
1176 | | |
1177 | 961 | if (!SkScalarIsFinite(offset)) { |
1178 | 1 | return false; |
1179 | 1 | } |
1180 | | |
1181 | | // can't inset more than the half bounds of the polygon |
1182 | 960 | if (offset > std::min(SkTAbs(SK_ScalarHalf*bounds.width()), |
1183 | 960 | SkTAbs(SK_ScalarHalf*bounds.height()))) { |
1184 | 5 | return false; |
1185 | 5 | } |
1186 | | |
1187 | | // offsetting close to zero just returns the original poly |
1188 | 955 | if (SkScalarNearlyZero(offset)) { |
1189 | 126k | for (int i = 0; i < inputPolygonSize; ++i) { |
1190 | 125k | *offsetPolygon->push() = inputPolygonVerts[i]; |
1191 | 125k | if (polygonIndices) { |
1192 | 0 | *polygonIndices->push() = i; |
1193 | 0 | } |
1194 | 125k | } |
1195 | 581 | return true; |
1196 | 581 | } |
1197 | | |
1198 | | // get winding direction |
1199 | 374 | int winding = SkGetPolygonWinding(inputPolygonVerts, inputPolygonSize); |
1200 | 374 | if (0 == winding) { |
1201 | 2 | return false; |
1202 | 2 | } |
1203 | | |
1204 | | // build normals |
1205 | 372 | SkAutoSTMalloc<64, SkVector> normals(inputPolygonSize); |
1206 | 372 | unsigned int numEdges = 0; |
1207 | 372 | for (int currIndex = 0, prevIndex = inputPolygonSize - 1; |
1208 | 14.0k | currIndex < inputPolygonSize; |
1209 | 13.7k | prevIndex = currIndex, ++currIndex) { |
1210 | 13.7k | if (!inputPolygonVerts[currIndex].isFinite()) { |
1211 | 1 | return false; |
1212 | 1 | } |
1213 | 13.7k | int nextIndex = (currIndex + 1) % inputPolygonSize; |
1214 | 13.7k | if (!compute_offset_vector(inputPolygonVerts[currIndex], inputPolygonVerts[nextIndex], |
1215 | 14 | offset, winding, &normals[currIndex])) { |
1216 | 14 | return false; |
1217 | 14 | } |
1218 | 13.7k | if (currIndex > 0) { |
1219 | | // if reflex point, we need to add extra edges |
1220 | 13.3k | if (is_reflex_vertex(inputPolygonVerts, winding, offset, |
1221 | 8.29k | prevIndex, currIndex, nextIndex)) { |
1222 | 8.29k | SkScalar rotSin, rotCos; |
1223 | 8.29k | int numSteps; |
1224 | 8.29k | if (!SkComputeRadialSteps(normals[prevIndex], normals[currIndex], offset, |
1225 | 8 | &rotSin, &rotCos, &numSteps)) { |
1226 | 8 | return false; |
1227 | 8 | } |
1228 | 8.28k | numEdges += std::max(numSteps, 1); |
1229 | 8.28k | } |
1230 | 13.3k | } |
1231 | 13.6k | numEdges++; |
1232 | 13.6k | } |
1233 | | // finish up the edge counting |
1234 | 349 | if (is_reflex_vertex(inputPolygonVerts, winding, offset, inputPolygonSize-1, 0, 1)) { |
1235 | 178 | SkScalar rotSin, rotCos; |
1236 | 178 | int numSteps; |
1237 | 178 | if (!SkComputeRadialSteps(normals[inputPolygonSize-1], normals[0], offset, |
1238 | 1 | &rotSin, &rotCos, &numSteps)) { |
1239 | 1 | return false; |
1240 | 1 | } |
1241 | 177 | numEdges += std::max(numSteps, 1); |
1242 | 177 | } |
1243 | | |
1244 | | // Make sure we don't overflow the max array count. |
1245 | | // We shouldn't overflow numEdges, as SkComputeRadialSteps returns a max of 2^16-1, |
1246 | | // and we have a max of 2^16-1 original vertices. |
1247 | 348 | if (numEdges > (unsigned int)std::numeric_limits<int32_t>::max()) { |
1248 | 0 | return false; |
1249 | 0 | } |
1250 | | |
1251 | | // build initial offset edge list |
1252 | 348 | SkSTArray<64, OffsetEdge> edgeData(numEdges); |
1253 | 348 | OffsetEdge* prevEdge = nullptr; |
1254 | 348 | for (int currIndex = 0, prevIndex = inputPolygonSize - 1; |
1255 | 13.9k | currIndex < inputPolygonSize; |
1256 | 13.5k | prevIndex = currIndex, ++currIndex) { |
1257 | 13.5k | int nextIndex = (currIndex + 1) % inputPolygonSize; |
1258 | | // if reflex point, fill in curve |
1259 | 13.5k | if (is_reflex_vertex(inputPolygonVerts, winding, offset, |
1260 | 8.40k | prevIndex, currIndex, nextIndex)) { |
1261 | 8.40k | SkScalar rotSin, rotCos; |
1262 | 8.40k | int numSteps; |
1263 | 8.40k | SkVector prevNormal = normals[prevIndex]; |
1264 | 8.40k | if (!SkComputeRadialSteps(prevNormal, normals[currIndex], offset, |
1265 | 0 | &rotSin, &rotCos, &numSteps)) { |
1266 | 0 | return false; |
1267 | 0 | } |
1268 | 8.40k | auto currEdge = edgeData.push_back_n(std::max(numSteps, 1)); |
1269 | 41.2M | for (int i = 0; i < numSteps - 1; ++i) { |
1270 | 41.2M | SkVector currNormal = SkVector::Make(prevNormal.fX*rotCos - prevNormal.fY*rotSin, |
1271 | 41.2M | prevNormal.fY*rotCos + prevNormal.fX*rotSin); |
1272 | 41.2M | setup_offset_edge(currEdge, |
1273 | 41.2M | inputPolygonVerts[currIndex] + prevNormal, |
1274 | 41.2M | inputPolygonVerts[currIndex] + currNormal, |
1275 | 41.2M | currIndex, currIndex); |
1276 | 41.2M | prevNormal = currNormal; |
1277 | 41.2M | currEdge->fPrev = prevEdge; |
1278 | 41.2M | if (prevEdge) { |
1279 | 41.2M | prevEdge->fNext = currEdge; |
1280 | 41.2M | } |
1281 | 41.2M | prevEdge = currEdge; |
1282 | 41.2M | ++currEdge; |
1283 | 41.2M | } |
1284 | 8.40k | setup_offset_edge(currEdge, |
1285 | 8.40k | inputPolygonVerts[currIndex] + prevNormal, |
1286 | 8.40k | inputPolygonVerts[currIndex] + normals[currIndex], |
1287 | 8.40k | currIndex, currIndex); |
1288 | 8.40k | currEdge->fPrev = prevEdge; |
1289 | 8.40k | if (prevEdge) { |
1290 | 8.37k | prevEdge->fNext = currEdge; |
1291 | 8.37k | } |
1292 | 8.40k | prevEdge = currEdge; |
1293 | 8.40k | } |
1294 | | |
1295 | | // Add the edge |
1296 | 13.5k | auto currEdge = edgeData.push_back_n(1); |
1297 | 13.5k | setup_offset_edge(currEdge, |
1298 | 13.5k | inputPolygonVerts[currIndex] + normals[currIndex], |
1299 | 13.5k | inputPolygonVerts[nextIndex] + normals[currIndex], |
1300 | 13.5k | currIndex, nextIndex); |
1301 | 13.5k | currEdge->fPrev = prevEdge; |
1302 | 13.5k | if (prevEdge) { |
1303 | 13.4k | prevEdge->fNext = currEdge; |
1304 | 13.4k | } |
1305 | 13.5k | prevEdge = currEdge; |
1306 | 13.5k | } |
1307 | | // close up the linked list |
1308 | 348 | SkASSERT(prevEdge); |
1309 | 348 | prevEdge->fNext = &edgeData[0]; |
1310 | 348 | edgeData[0].fPrev = prevEdge; |
1311 | | |
1312 | | // now clip edges |
1313 | 348 | SkASSERT(edgeData.count() == (int)numEdges); |
1314 | 348 | auto head = &edgeData[0]; |
1315 | 348 | auto currEdge = head; |
1316 | 348 | unsigned int offsetVertexCount = numEdges; |
1317 | 348 | unsigned long long iterations = 0; |
1318 | 348 | unsigned long long maxIterations = (unsigned long long)(numEdges) * numEdges; |
1319 | 190M | while (head && prevEdge != currEdge && offsetVertexCount > 0) { |
1320 | 190M | ++iterations; |
1321 | | // we should check each edge against each other edge at most once |
1322 | 190M | if (iterations > maxIterations) { |
1323 | 30 | return false; |
1324 | 30 | } |
1325 | | |
1326 | 190M | SkScalar s, t; |
1327 | 190M | SkPoint intersection; |
1328 | 190M | if (prevEdge->checkIntersection(currEdge, &intersection, &s, &t)) { |
1329 | | // if new intersection is further back on previous inset from the prior intersection |
1330 | 179M | if (s < prevEdge->fTValue) { |
1331 | | // no point in considering this one again |
1332 | 274 | remove_node(prevEdge, &head); |
1333 | 274 | --offsetVertexCount; |
1334 | | // go back one segment |
1335 | 274 | prevEdge = prevEdge->fPrev; |
1336 | | // we've already considered this intersection, we're done |
1337 | 179M | } else if (currEdge->fTValue > SK_ScalarMin && |
1338 | 74.0M | SkPointPriv::EqualsWithinTolerance(intersection, |
1339 | 74.0M | currEdge->fIntersection, |
1340 | 310 | 1.0e-6f)) { |
1341 | 310 | break; |
1342 | 179M | } else { |
1343 | | // add intersection |
1344 | 179M | currEdge->fIntersection = intersection; |
1345 | 179M | currEdge->fTValue = t; |
1346 | 179M | currEdge->fIndex = prevEdge->fEnd; |
1347 | | |
1348 | | // go to next segment |
1349 | 179M | prevEdge = currEdge; |
1350 | 179M | currEdge = currEdge->fNext; |
1351 | 179M | } |
1352 | 11.0M | } else { |
1353 | | // If there is no intersection, we want to minimize the distance between |
1354 | | // the point where the segment lines cross and the segments themselves. |
1355 | 11.0M | OffsetEdge* prevPrevEdge = prevEdge->fPrev; |
1356 | 11.0M | OffsetEdge* currNextEdge = currEdge->fNext; |
1357 | 11.0M | SkScalar dist0 = currEdge->computeCrossingDistance(prevPrevEdge); |
1358 | 11.0M | SkScalar dist1 = prevEdge->computeCrossingDistance(currNextEdge); |
1359 | | // if both lead to direct collision |
1360 | 11.0M | if (dist0 < 0 && dist1 < 0) { |
1361 | | // check first to see if either represent parts of one contour |
1362 | 30.6k | SkPoint p1 = prevPrevEdge->fOffset.fP0 + prevPrevEdge->fOffset.fV; |
1363 | 30.6k | bool prevSameContour = SkPointPriv::EqualsWithinTolerance(p1, |
1364 | 30.6k | prevEdge->fOffset.fP0); |
1365 | 30.6k | p1 = currEdge->fOffset.fP0 + currEdge->fOffset.fV; |
1366 | 30.6k | bool currSameContour = SkPointPriv::EqualsWithinTolerance(p1, |
1367 | 30.6k | currNextEdge->fOffset.fP0); |
1368 | | |
1369 | | // want to step along contour to find intersections rather than jump to new one |
1370 | 30.6k | if (currSameContour && !prevSameContour) { |
1371 | 7.05k | remove_node(currEdge, &head); |
1372 | 7.05k | currEdge = currNextEdge; |
1373 | 7.05k | --offsetVertexCount; |
1374 | 7.05k | continue; |
1375 | 23.5k | } else if (prevSameContour && !currSameContour) { |
1376 | 449 | remove_node(prevEdge, &head); |
1377 | 449 | prevEdge = prevPrevEdge; |
1378 | 449 | --offsetVertexCount; |
1379 | 449 | continue; |
1380 | 449 | } |
1381 | 11.0M | } |
1382 | | |
1383 | | // otherwise minimize collision distance along segment |
1384 | 11.0M | if (dist0 < dist1) { |
1385 | 1.01M | remove_node(prevEdge, &head); |
1386 | 1.01M | prevEdge = prevPrevEdge; |
1387 | 10.0M | } else { |
1388 | 10.0M | remove_node(currEdge, &head); |
1389 | 10.0M | currEdge = currNextEdge; |
1390 | 10.0M | } |
1391 | 11.0M | --offsetVertexCount; |
1392 | 11.0M | } |
1393 | 190M | } |
1394 | | |
1395 | | // store all the valid intersections that aren't nearly coincident |
1396 | | // TODO: look at the main algorithm and see if we can detect these better |
1397 | 318 | offsetPolygon->reset(); |
1398 | 318 | if (!head || offsetVertexCount == 0 || |
1399 | 318 | offsetVertexCount >= std::numeric_limits<uint16_t>::max()) { |
1400 | 63 | return false; |
1401 | 63 | } |
1402 | | |
1403 | 255 | static constexpr SkScalar kCleanupTolerance = 0.01f; |
1404 | 255 | offsetPolygon->setReserve(offsetVertexCount); |
1405 | 255 | int currIndex = 0; |
1406 | 255 | *offsetPolygon->push() = head->fIntersection; |
1407 | 255 | if (polygonIndices) { |
1408 | 0 | *polygonIndices->push() = head->fIndex; |
1409 | 0 | } |
1410 | 255 | currEdge = head->fNext; |
1411 | 1.24M | while (currEdge != head) { |
1412 | 1.24M | if (!SkPointPriv::EqualsWithinTolerance(currEdge->fIntersection, |
1413 | 1.24M | (*offsetPolygon)[currIndex], |
1414 | 1.07M | kCleanupTolerance)) { |
1415 | 1.07M | *offsetPolygon->push() = currEdge->fIntersection; |
1416 | 1.07M | if (polygonIndices) { |
1417 | 0 | *polygonIndices->push() = currEdge->fIndex; |
1418 | 0 | } |
1419 | 1.07M | currIndex++; |
1420 | 1.07M | } |
1421 | 1.24M | currEdge = currEdge->fNext; |
1422 | 1.24M | } |
1423 | | // make sure the first and last points aren't coincident |
1424 | 255 | if (currIndex >= 1 && |
1425 | 214 | SkPointPriv::EqualsWithinTolerance((*offsetPolygon)[0], (*offsetPolygon)[currIndex], |
1426 | 12 | kCleanupTolerance)) { |
1427 | 12 | offsetPolygon->pop(); |
1428 | 12 | if (polygonIndices) { |
1429 | 0 | polygonIndices->pop(); |
1430 | 0 | } |
1431 | 12 | } |
1432 | | |
1433 | | // check winding of offset polygon (it should be same as the original polygon) |
1434 | 255 | SkScalar offsetWinding = SkGetPolygonWinding(offsetPolygon->begin(), offsetPolygon->count()); |
1435 | | |
1436 | 255 | return (winding*offsetWinding > 0 && |
1437 | 169 | SkIsSimplePolygon(offsetPolygon->begin(), offsetPolygon->count())); |
1438 | 255 | } |
1439 | | |
1440 | | ////////////////////////////////////////////////////////////////////////////////////////// |
1441 | | |
// Vertex record for the ear-clipping triangulation below. Each vertex lives in
// an intrusive doubly-linked list (convex list or a ReflexHash grid cell) and
// tracks its current neighbors in the shrinking polygon by index.
struct TriangulationVertex {
    SK_DECLARE_INTERNAL_LLIST_INTERFACE(TriangulationVertex);

    // kConvex vertices are candidate ears; kReflex vertices (which also
    // include near-collinear ones) are the only ones that can block an ear.
    enum class VertexType { kConvex, kReflex };

    SkPoint fPosition;       // position of this vertex
    VertexType fVertexType;  // current classification; may flip to kConvex as ears are clipped
    uint16_t fIndex;         // index of this vertex in the original polygon
    uint16_t fPrevIndex;     // index of the previous vertex in the current (clipped) polygon
    uint16_t fNextIndex;     // index of the next vertex in the current (clipped) polygon
};
1453 | | |
1454 | | static void compute_triangle_bounds(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2, |
1455 | 47.9k | SkRect* bounds) { |
1456 | 47.9k | Sk4s min, max; |
1457 | 47.9k | min = max = Sk4s(p0.fX, p0.fY, p0.fX, p0.fY); |
1458 | 47.9k | Sk4s xy(p1.fX, p1.fY, p2.fX, p2.fY); |
1459 | 47.9k | min = Sk4s::Min(min, xy); |
1460 | 47.9k | max = Sk4s::Max(max, xy); |
1461 | 47.9k | bounds->setLTRB(std::min(min[0], min[2]), std::min(min[1], min[3]), |
1462 | 47.9k | std::max(max[0], max[2]), std::max(max[1], max[3])); |
1463 | 47.9k | } |
1464 | | |
1465 | | // test to see if point p is in triangle p0p1p2. |
1466 | | // for now assuming strictly inside -- if on the edge it's outside |
1467 | | static bool point_in_triangle(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2, |
1468 | 1.91M | const SkPoint& p) { |
1469 | 1.91M | SkVector v0 = p1 - p0; |
1470 | 1.91M | SkVector v1 = p2 - p1; |
1471 | 1.91M | SkScalar n = v0.cross(v1); |
1472 | | |
1473 | 1.91M | SkVector w0 = p - p0; |
1474 | 1.91M | if (n*v0.cross(w0) < SK_ScalarNearlyZero) { |
1475 | 984k | return false; |
1476 | 984k | } |
1477 | | |
1478 | 934k | SkVector w1 = p - p1; |
1479 | 934k | if (n*v1.cross(w1) < SK_ScalarNearlyZero) { |
1480 | 603k | return false; |
1481 | 603k | } |
1482 | | |
1483 | 331k | SkVector v2 = p0 - p2; |
1484 | 331k | SkVector w2 = p - p2; |
1485 | 331k | if (n*v2.cross(w2) < SK_ScalarNearlyZero) { |
1486 | 287k | return false; |
1487 | 287k | } |
1488 | | |
1489 | 43.9k | return true; |
1490 | 43.9k | } |
1491 | | |
1492 | | // Data structure to track reflex vertices and check whether any are inside a given triangle |
class ReflexHash {
public:
    // Partitions the polygon bounds into roughly vertexCount grid cells
    // (aspect-matched to the bounds) and resets the per-cell vertex lists.
    // Returns false if the bounds or the derived grid transform are not finite.
    bool init(const SkRect& bounds, int vertexCount) {
        fBounds = bounds;
        fNumVerts = 0;
        SkScalar width = bounds.width();
        SkScalar height = bounds.height();
        if (!SkScalarIsFinite(width) || !SkScalarIsFinite(height)) {
            return false;
        }

        // We want vertexCount grid cells, roughly distributed to match the bounds ratio
        SkScalar hCount = SkScalarSqrt(sk_ieee_float_divide(vertexCount*width, height));
        if (!SkScalarIsFinite(hCount)) {
            return false;
        }
        fHCount = std::max(std::min(SkScalarRoundToInt(hCount), vertexCount), 1);
        fVCount = vertexCount/fHCount;
        // the -0.001f bias keeps a point exactly on the right/bottom edge of
        // the bounds from mapping one cell past the end of the grid
        fGridConversion.set(sk_ieee_float_divide(fHCount - 0.001f, width),
                            sk_ieee_float_divide(fVCount - 0.001f, height));
        if (!fGridConversion.isFinite()) {
            return false;
        }

        fGrid.setCount(fHCount*fVCount);
        for (int i = 0; i < fGrid.count(); ++i) {
            fGrid[i].reset();
        }

        return true;
    }

    // Adds a reflex vertex to the tail of its grid cell's list.
    void add(TriangulationVertex* v) {
        int index = hash(v);
        fGrid[index].addToTail(v);
        ++fNumVerts;
    }

    // Removes a vertex from its grid cell (e.g. when it becomes convex).
    void remove(TriangulationVertex* v) {
        int index = hash(v);
        fGrid[index].remove(v);
        --fNumVerts;
    }

    // Returns true if any tracked reflex vertex -- other than the two with
    // the given ignored indices -- lies inside triangle p0p1p2. Only the grid
    // cells overlapped by the triangle's bounds are scanned.
    bool checkTriangle(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2,
                       uint16_t ignoreIndex0, uint16_t ignoreIndex1) const {
        if (!fNumVerts) {
            return false;
        }

        // clamp the scan to the cell range covered by the triangle's bounds
        SkRect triBounds;
        compute_triangle_bounds(p0, p1, p2, &triBounds);
        int h0 = (triBounds.fLeft - fBounds.fLeft)*fGridConversion.fX;
        int h1 = (triBounds.fRight - fBounds.fLeft)*fGridConversion.fX;
        int v0 = (triBounds.fTop - fBounds.fTop)*fGridConversion.fY;
        int v1 = (triBounds.fBottom - fBounds.fTop)*fGridConversion.fY;

        for (int v = v0; v <= v1; ++v) {
            for (int h = h0; h <= h1; ++h) {
                int i = v * fHCount + h;
                for (SkTInternalLList<TriangulationVertex>::Iter reflexIter = fGrid[i].begin();
                     reflexIter != fGrid[i].end(); ++reflexIter) {
                    TriangulationVertex* reflexVertex = *reflexIter;
                    if (reflexVertex->fIndex != ignoreIndex0 &&
                        reflexVertex->fIndex != ignoreIndex1 &&
                        point_in_triangle(p0, p1, p2, reflexVertex->fPosition)) {
                        return true;
                    }
                }

            }
        }

        return false;
    }

private:
    // Maps a vertex's position to the index of its grid cell.
    // NOTE(review): assumes the vertex lies within fBounds, so the cell
    // coordinates are non-negative (asserted below).
    int hash(TriangulationVertex* vert) const {
        int h = (vert->fPosition.fX - fBounds.fLeft)*fGridConversion.fX;
        int v = (vert->fPosition.fY - fBounds.fTop)*fGridConversion.fY;
        SkASSERT(v*fHCount + h >= 0);
        return v*fHCount + h;
    }

    SkRect fBounds;
    int fHCount;
    int fVCount;
    int fNumVerts;
    // converts distance from the origin to a grid location (when cast to int)
    SkVector fGridConversion;
    SkTDArray<SkTInternalLList<TriangulationVertex>> fGrid;
};
1585 | | |
1586 | | // Check to see if a reflex vertex has become a convex vertex after clipping an ear |
1587 | | static void reclassify_vertex(TriangulationVertex* p, const SkPoint* polygonVerts, |
1588 | | int winding, ReflexHash* reflexHash, |
1589 | 9.34k | SkTInternalLList<TriangulationVertex>* convexList) { |
1590 | 9.34k | if (TriangulationVertex::VertexType::kReflex == p->fVertexType) { |
1591 | 4.11k | SkVector v0 = p->fPosition - polygonVerts[p->fPrevIndex]; |
1592 | 4.11k | SkVector v1 = polygonVerts[p->fNextIndex] - p->fPosition; |
1593 | 4.11k | if (winding*v0.cross(v1) > SK_ScalarNearlyZero*SK_ScalarNearlyZero) { |
1594 | 1.19k | p->fVertexType = TriangulationVertex::VertexType::kConvex; |
1595 | 1.19k | reflexHash->remove(p); |
1596 | 1.19k | p->fPrev = p->fNext = nullptr; |
1597 | 1.19k | convexList->addToTail(p); |
1598 | 1.19k | } |
1599 | 4.11k | } |
1600 | 9.34k | } |
1601 | | |
// Triangulates a simple polygon by ear clipping: repeatedly find a convex
// vertex whose triangle (with its two neighbors) contains no reflex vertex,
// emit that triangle, remove the vertex, and reclassify its neighbors.
// indexMap translates local polygon indices into the caller's index space
// before indices are appended to triangleIndices. Returns false if the input
// is degenerate, non-finite, or doesn't behave like a simple polygon.
bool SkTriangulateSimplePolygon(const SkPoint* polygonVerts, uint16_t* indexMap, int polygonSize,
                                SkTDArray<uint16_t>* triangleIndices) {
    if (polygonSize < 3) {
        return false;
    }
    // need to be able to represent all the vertices in the 16-bit indices
    if (polygonSize >= std::numeric_limits<uint16_t>::max()) {
        return false;
    }

    // get bounds
    SkRect bounds;
    if (!bounds.setBoundsCheck(polygonVerts, polygonSize)) {
        return false;
    }
    // get winding direction
    // TODO: we do this for all the polygon routines -- might be better to have the client
    // compute it and pass it in
    int winding = SkGetPolygonWinding(polygonVerts, polygonSize);
    if (0 == winding) {
        return false;
    }

    // Set up vertices
    SkAutoSTArray<64, TriangulationVertex> triangulationVertices(polygonSize);
    int prevIndex = polygonSize - 1;
    SkVector v0 = polygonVerts[0] - polygonVerts[prevIndex];
    for (int currIndex = 0; currIndex < polygonSize; ++currIndex) {
        int nextIndex = (currIndex + 1) % polygonSize;

        triangulationVertices[currIndex] = TriangulationVertex{};
        triangulationVertices[currIndex].fPosition = polygonVerts[currIndex];
        triangulationVertices[currIndex].fIndex = currIndex;
        triangulationVertices[currIndex].fPrevIndex = prevIndex;
        triangulationVertices[currIndex].fNextIndex = nextIndex;
        SkVector v1 = polygonVerts[nextIndex] - polygonVerts[currIndex];
        // a vertex is convex when the turn at it matches the polygon winding
        if (winding*v0.cross(v1) > SK_ScalarNearlyZero*SK_ScalarNearlyZero) {
            triangulationVertices[currIndex].fVertexType = TriangulationVertex::VertexType::kConvex;
        } else {
            triangulationVertices[currIndex].fVertexType = TriangulationVertex::VertexType::kReflex;
        }

        prevIndex = currIndex;
        v0 = v1;
    }

    // Classify initial vertices into a list of convex vertices and a hash of reflex vertices
    // TODO: possibly sort the convexList in some way to get better triangles
    SkTInternalLList<TriangulationVertex> convexList;
    ReflexHash reflexHash;
    if (!reflexHash.init(bounds, polygonSize)) {
        return false;
    }
    prevIndex = polygonSize - 1;
    for (int currIndex = 0; currIndex < polygonSize; prevIndex = currIndex, ++currIndex) {
        TriangulationVertex::VertexType currType = triangulationVertices[currIndex].fVertexType;
        if (TriangulationVertex::VertexType::kConvex == currType) {
            int nextIndex = (currIndex + 1) % polygonSize;
            TriangulationVertex::VertexType prevType = triangulationVertices[prevIndex].fVertexType;
            TriangulationVertex::VertexType nextType = triangulationVertices[nextIndex].fVertexType;
            // We prioritize clipping vertices with neighboring reflex vertices.
            // The intent here is that it will cull reflex vertices more quickly.
            if (TriangulationVertex::VertexType::kReflex == prevType ||
                TriangulationVertex::VertexType::kReflex == nextType) {
                convexList.addToHead(&triangulationVertices[currIndex]);
            } else {
                convexList.addToTail(&triangulationVertices[currIndex]);
            }
        } else {
            // We treat near collinear vertices as reflex
            reflexHash.add(&triangulationVertices[currIndex]);
        }
    }

    // The general concept: We are trying to find three neighboring vertices where
    // no other vertex lies inside the triangle (an "ear"). If we find one, we clip
    // that ear off, and then repeat on the new polygon. Once we get down to three vertices
    // we have triangulated the entire polygon.
    // In the worst case this is an n^2 algorithm. We can cut down the search space somewhat by
    // noting that only convex vertices can be potential ears, and we only need to check whether
    // any reflex vertices lie inside the ear.
    triangleIndices->setReserve(triangleIndices->count() + 3 * (polygonSize - 2));
    int vertexCount = polygonSize;
    while (vertexCount > 3) {
        bool success = false;
        TriangulationVertex* earVertex = nullptr;
        TriangulationVertex* p0 = nullptr;
        TriangulationVertex* p2 = nullptr;
        // find a convex vertex to clip
        for (SkTInternalLList<TriangulationVertex>::Iter convexIter = convexList.begin();
             convexIter != convexList.end(); ++convexIter) {
            earVertex = *convexIter;
            SkASSERT(TriangulationVertex::VertexType::kReflex != earVertex->fVertexType);

            p0 = &triangulationVertices[earVertex->fPrevIndex];
            p2 = &triangulationVertices[earVertex->fNextIndex];

            // see if any reflex vertices are inside the ear
            bool failed = reflexHash.checkTriangle(p0->fPosition, earVertex->fPosition,
                                                   p2->fPosition, p0->fIndex, p2->fIndex);
            if (failed) {
                continue;
            }

            // found one we can clip
            success = true;
            break;
        }
        // If we can't find any ears to clip, this probably isn't a simple polygon
        if (!success) {
            return false;
        }

        // add indices
        auto indices = triangleIndices->append(3);
        indices[0] = indexMap[p0->fIndex];
        indices[1] = indexMap[earVertex->fIndex];
        indices[2] = indexMap[p2->fIndex];

        // clip the ear
        convexList.remove(earVertex);
        --vertexCount;

        // reclassify reflex verts
        // removing the ear may have turned a reflex neighbor convex
        p0->fNextIndex = earVertex->fNextIndex;
        reclassify_vertex(p0, polygonVerts, winding, &reflexHash, &convexList);

        p2->fPrevIndex = earVertex->fPrevIndex;
        reclassify_vertex(p2, polygonVerts, winding, &reflexHash, &convexList);
    }

    // output indices
    // the three remaining vertices form the final triangle
    for (SkTInternalLList<TriangulationVertex>::Iter vertexIter = convexList.begin();
         vertexIter != convexList.end(); ++vertexIter) {
        TriangulationVertex* vertex = *vertexIter;
        *triangleIndices->push() = indexMap[vertex->fIndex];
    }

    return true;
}
1742 | | |
1743 | | /////////// |
1744 | | |
1745 | 0 | static double crs(SkVector a, SkVector b) { |
1746 | 0 | return a.fX * b.fY - a.fY * b.fX; |
1747 | 0 | } |
1748 | | |
1749 | 0 | static int sign(SkScalar v) { |
1750 | 0 | return v < 0 ? -1 : (v > 0); |
1751 | 0 | } |
1752 | | |
1753 | | struct SignTracker { |
1754 | | int fSign; |
1755 | | int fSignChanges; |
1756 | | |
1757 | 0 | void reset() { |
1758 | 0 | fSign = 0; |
1759 | 0 | fSignChanges = 0; |
1760 | 0 | } |
1761 | | |
1762 | 0 | void init(int s) { |
1763 | 0 | SkASSERT(fSignChanges == 0); |
1764 | 0 | SkASSERT(s == 1 || s == -1 || s == 0); |
1765 | 0 | fSign = s; |
1766 | 0 | fSignChanges = 1; |
1767 | 0 | } |
1768 | | |
1769 | 0 | void update(int s) { |
1770 | 0 | if (s) { |
1771 | 0 | if (fSign != s) { |
1772 | 0 | fSignChanges += 1; |
1773 | 0 | fSign = s; |
1774 | 0 | } |
1775 | 0 | } |
1776 | 0 | } |
1777 | | }; |
1778 | | |
1779 | | struct ConvexTracker { |
1780 | | SkVector fFirst, fPrev; |
1781 | | SignTracker fDSign, fCSign; |
1782 | | int fVecCounter; |
1783 | | bool fIsConcave; |
1784 | | |
1785 | 0 | ConvexTracker() { this->reset(); } |
1786 | | |
1787 | 0 | void reset() { |
1788 | 0 | fPrev = {0, 0}; |
1789 | 0 | fDSign.reset(); |
1790 | 0 | fCSign.reset(); |
1791 | 0 | fVecCounter = 0; |
1792 | 0 | fIsConcave = false; |
1793 | 0 | } |
1794 | | |
1795 | 0 | void addVec(SkPoint p1, SkPoint p0) { |
1796 | 0 | this->addVec(p1 - p0); |
1797 | 0 | } |
1798 | 0 | void addVec(SkVector v) { |
1799 | 0 | if (v.fX == 0 && v.fY == 0) { |
1800 | 0 | return; |
1801 | 0 | } |
1802 | | |
1803 | 0 | fVecCounter += 1; |
1804 | 0 | if (fVecCounter == 1) { |
1805 | 0 | fFirst = fPrev = v; |
1806 | 0 | fDSign.update(sign(v.fX)); |
1807 | 0 | return; |
1808 | 0 | } |
1809 | | |
1810 | 0 | SkScalar d = v.fX; |
1811 | 0 | SkScalar c = crs(fPrev, v); |
1812 | 0 | int sign_c; |
1813 | 0 | if (c) { |
1814 | 0 | sign_c = sign(c); |
1815 | 0 | } else { |
1816 | 0 | if (d >= 0) { |
1817 | 0 | sign_c = fCSign.fSign; |
1818 | 0 | } else { |
1819 | 0 | sign_c = -fCSign.fSign; |
1820 | 0 | } |
1821 | 0 | } |
1822 | |
|
1823 | 0 | fDSign.update(sign(d)); |
1824 | 0 | fCSign.update(sign_c); |
1825 | 0 | fPrev = v; |
1826 | |
|
1827 | 0 | if (fDSign.fSignChanges > 3 || fCSign.fSignChanges > 1) { |
1828 | 0 | fIsConcave = true; |
1829 | 0 | } |
1830 | 0 | } |
1831 | | |
1832 | 0 | void finalCross() { |
1833 | 0 | this->addVec(fFirst); |
1834 | 0 | } |
1835 | | }; |
1836 | | |
1837 | 0 | bool SkIsPolyConvex_experimental(const SkPoint pts[], int count) { |
1838 | 0 | if (count <= 3) { |
1839 | 0 | return true; |
1840 | 0 | } |
1841 | | |
1842 | 0 | ConvexTracker tracker; |
1843 | |
|
1844 | 0 | for (int i = 0; i < count - 1; ++i) { |
1845 | 0 | tracker.addVec(pts[i + 1], pts[i]); |
1846 | 0 | if (tracker.fIsConcave) { |
1847 | 0 | return false; |
1848 | 0 | } |
1849 | 0 | } |
1850 | 0 | tracker.addVec(pts[0], pts[count - 1]); |
1851 | 0 | tracker.finalCross(); |
1852 | 0 | return !tracker.fIsConcave; |
1853 | 0 | } |
1854 | | |