1 | /* |
2 | * Copyright 2017 Google Inc. |
3 | * |
4 | * Use of this source code is governed by a BSD-style license that can be |
5 | * found in the LICENSE file. |
6 | */ |
7 | |
8 | #include "src/utils/SkPolyUtils.h" |
9 | |
10 | #include <limits> |
11 | |
12 | #include "include/private/SkNx.h" |
13 | #include "include/private/SkTArray.h" |
14 | #include "include/private/SkTemplates.h" |
15 | #include "src/core/SkPointPriv.h" |
16 | #include "src/core/SkTDPQueue.h" |
17 | #include "src/core/SkTInternalLList.h" |
18 | |
19 | ////////////////////////////////////////////////////////////////////////////////// |
20 | // Helper data structures and functions |
21 | |
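// A line segment stored as its start point plus the vector to its end point (end == fP0 + fV)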
22 | struct OffsetSegment { |
23 | SkPoint fP0; |
24 | SkVector fV; |
25 | }; |
26 | |
27 | constexpr SkScalar kCrossTolerance = SK_ScalarNearlyZero * SK_ScalarNearlyZero; |
28 | |
29 | // Computes perpDot for point p compared to segment defined by origin p0 and vector v. |
30 | // A positive value means the point is to the left of the segment, |
31 | // negative is to the right, 0 is collinear. |
32 | static int compute_side(const SkPoint& p0, const SkVector& v, const SkPoint& p) { |
33 | SkVector w = p - p0; |
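    // perpDot is the z-component of the 3D cross product v x w, i.e. v.fX*w.fY - v.fY*w.fX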
34 | SkScalar perpDot = v.cross(w); |
35 | if (!SkScalarNearlyZero(perpDot, kCrossTolerance)) { |
36 | return ((perpDot > 0) ? 1 : -1); |
37 | } |
38 | |
39 | return 0; |
40 | } |
41 | |
42 | // Returns 1 for cw, -1 for ccw and 0 if zero signed area (either degenerate or self-intersecting) |
43 | int SkGetPolygonWinding(const SkPoint* polygonVerts, int polygonSize) { |
44 | if (polygonSize < 3) { |
45 | return 0; |
46 | } |
47 | |
48 | // compute area and use sign to determine winding |
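    // Fan from vertex 0: summing cross(v0, v1) over the fan triangles gives twice the
    // polygon's signed area (the shoelace formula); only its sign matters here.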
49 | SkScalar quadArea = 0; |
50 | SkVector v0 = polygonVerts[1] - polygonVerts[0]; |
51 | for (int curr = 2; curr < polygonSize; ++curr) { |
52 | SkVector v1 = polygonVerts[curr] - polygonVerts[0]; |
53 | quadArea += v0.cross(v1); |
54 | v0 = v1; |
55 | } |
56 | if (SkScalarNearlyZero(quadArea, kCrossTolerance)) { |
57 | return 0; |
58 | } |
    // 1 == cw in Skia's y-down device space (ccw in a y-up frame), -1 == the reverse
60 | return (quadArea > 0) ? 1 : -1; |
61 | } |
62 | |
63 | // Compute difference vector to offset p0-p1 'offset' units in direction specified by 'side' |
64 | bool compute_offset_vector(const SkPoint& p0, const SkPoint& p1, SkScalar offset, int side, |
65 | SkPoint* vector) { |
66 | SkASSERT(side == -1 || side == 1); |
    // rotate the segment vector by 90 degrees and scale it to the offset distance
68 | SkVector perp = SkVector::Make(p0.fY - p1.fY, p1.fX - p0.fX); |
69 | if (!perp.setLength(offset*side)) { |
70 | return false; |
71 | } |
72 | *vector = perp; |
73 | return true; |
74 | } |
75 | |
// Checks whether numer/denom lies outside [0,1] (i.e., the intersection parameter is off either
// end of the segment) without doing the division; the sign of the denominator is handled explicitly.
77 | static inline bool outside_interval(SkScalar numer, SkScalar denom, bool denomPositive) { |
78 | return (denomPositive && (numer < 0 || numer > denom)) || |
79 | (!denomPositive && (numer > 0 || numer < denom)); |
80 | } |
81 | |
82 | // special zero-length test when we're using vdotv as a denominator |
83 | static inline bool zero_length(const SkPoint& v, SkScalar vdotv) { |
84 | return !(SkScalarsAreFinite(v.fX, v.fY) && vdotv); |
85 | } |
86 | |
87 | // Compute the intersection 'p' between segments s0 and s1, if any. |
88 | // 's' is the parametric value for the intersection along 's0' & 't' is the same for 's1'. |
89 | // Returns false if there is no intersection. |
90 | // If the length squared of a segment is 0, then we treat the segment as degenerate |
91 | // and use only the first endpoint for tests. |
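//
// Derivation for the non-parallel case: writing the segments parametrically as
//   s0(s) = s0.fP0 + s*v0  and  s1(t) = s1.fP0 + t*v1,  with  w = s1.fP0 - s0.fP0,
// setting them equal and taking the 2D cross product with v1 (and then with v0) gives
//   s = (w x v1) / (v0 x v1)  and  t = (w x v0) / (v0 x v1),
// which is what the non-parallel branch below computes before doing the division.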
92 | static bool compute_intersection(const OffsetSegment& s0, const OffsetSegment& s1, |
93 | SkPoint* p, SkScalar* s, SkScalar* t) { |
94 | const SkVector& v0 = s0.fV; |
95 | const SkVector& v1 = s1.fV; |
96 | SkVector w = s1.fP0 - s0.fP0; |
97 | SkScalar denom = v0.cross(v1); |
98 | bool denomPositive = (denom > 0); |
99 | SkScalar sNumer, tNumer; |
100 | if (SkScalarNearlyZero(denom, kCrossTolerance)) { |
        // segments are parallel -- if they're not also collinear, there's no intersection
102 | if (!SkScalarNearlyZero(w.cross(v0), kCrossTolerance) || |
103 | !SkScalarNearlyZero(w.cross(v1), kCrossTolerance)) { |
104 | return false; |
105 | } |
106 | |
107 | // Check for zero-length segments |
108 | SkScalar v0dotv0 = v0.dot(v0); |
109 | if (zero_length(v0, v0dotv0)) { |
110 | // Both are zero-length |
111 | SkScalar v1dotv1 = v1.dot(v1); |
112 | if (zero_length(v1, v1dotv1)) { |
113 | // Check if they're the same point |
114 | if (!SkPointPriv::CanNormalize(w.fX, w.fY)) { |
115 | *p = s0.fP0; |
116 | *s = 0; |
117 | *t = 0; |
118 | return true; |
119 | } else { |
120 | // Intersection is indeterminate |
121 | return false; |
122 | } |
123 | } |
124 | // Otherwise project segment0's origin onto segment1 |
125 | tNumer = v1.dot(-w); |
126 | denom = v1dotv1; |
127 | if (outside_interval(tNumer, denom, true)) { |
128 | return false; |
129 | } |
130 | sNumer = 0; |
131 | } else { |
132 | // Project segment1's endpoints onto segment0 |
133 | sNumer = v0.dot(w); |
134 | denom = v0dotv0; |
135 | tNumer = 0; |
136 | if (outside_interval(sNumer, denom, true)) { |
137 | // The first endpoint doesn't lie on segment0 |
138 | // If segment1 is degenerate, then there's no collision |
139 | SkScalar v1dotv1 = v1.dot(v1); |
140 | if (zero_length(v1, v1dotv1)) { |
141 | return false; |
142 | } |
143 | |
144 | // Otherwise try the other one |
145 | SkScalar oldSNumer = sNumer; |
146 | sNumer = v0.dot(w + v1); |
147 | tNumer = denom; |
148 | if (outside_interval(sNumer, denom, true)) { |
                    // It's possible that segment1's interval surrounds segment0; that can only
                    // happen if the params have opposite signs -- same signs means no collision.
151 | if (sNumer*oldSNumer > 0) { |
152 | return false; |
153 | } |
154 | // otherwise project segment0's endpoint onto segment1 instead |
155 | sNumer = 0; |
156 | tNumer = v1.dot(-w); |
157 | denom = v1dotv1; |
158 | } |
159 | } |
160 | } |
161 | } else { |
162 | sNumer = w.cross(v1); |
163 | if (outside_interval(sNumer, denom, denomPositive)) { |
164 | return false; |
165 | } |
166 | tNumer = w.cross(v0); |
167 | if (outside_interval(tNumer, denom, denomPositive)) { |
168 | return false; |
169 | } |
170 | } |
171 | |
172 | SkScalar localS = sNumer/denom; |
173 | SkScalar localT = tNumer/denom; |
174 | |
175 | *p = s0.fP0 + v0*localS; |
176 | *s = localS; |
177 | *t = localT; |
178 | |
179 | return true; |
180 | } |
181 | |
182 | bool SkIsConvexPolygon(const SkPoint* polygonVerts, int polygonSize) { |
183 | if (polygonSize < 3) { |
184 | return false; |
185 | } |
186 | |
187 | SkScalar lastArea = 0; |
188 | SkScalar lastPerpDot = 0; |
189 | |
190 | int prevIndex = polygonSize - 1; |
191 | int currIndex = 0; |
192 | int nextIndex = 1; |
193 | SkPoint origin = polygonVerts[0]; |
194 | SkVector v0 = polygonVerts[currIndex] - polygonVerts[prevIndex]; |
195 | SkVector v1 = polygonVerts[nextIndex] - polygonVerts[currIndex]; |
196 | SkVector w0 = polygonVerts[currIndex] - origin; |
197 | SkVector w1 = polygonVerts[nextIndex] - origin; |
198 | for (int i = 0; i < polygonSize; ++i) { |
199 | if (!polygonVerts[i].isFinite()) { |
200 | return false; |
201 | } |
202 | |
203 | // Check that winding direction is always the same (otherwise we have a reflex vertex) |
204 | SkScalar perpDot = v0.cross(v1); |
205 | if (lastPerpDot*perpDot < 0) { |
206 | return false; |
207 | } |
208 | if (0 != perpDot) { |
209 | lastPerpDot = perpDot; |
210 | } |
211 | |
        // If the signed area ever flips sign, the polygon is concave
213 | // TODO: see if we can verify convexity only with signed area |
214 | SkScalar quadArea = w0.cross(w1); |
215 | if (quadArea*lastArea < 0) { |
216 | return false; |
217 | } |
218 | if (0 != quadArea) { |
219 | lastArea = quadArea; |
220 | } |
221 | |
222 | prevIndex = currIndex; |
223 | currIndex = nextIndex; |
224 | nextIndex = (currIndex + 1) % polygonSize; |
225 | v0 = v1; |
226 | v1 = polygonVerts[nextIndex] - polygonVerts[currIndex]; |
227 | w0 = w1; |
228 | w1 = polygonVerts[nextIndex] - origin; |
229 | } |
230 | |
231 | return true; |
232 | } |
233 | |
234 | struct OffsetEdge { |
235 | OffsetEdge* fPrev; |
236 | OffsetEdge* fNext; |
237 | OffsetSegment fOffset; |
238 | SkPoint fIntersection; |
239 | SkScalar fTValue; |
240 | uint16_t fIndex; |
241 | uint16_t fEnd; |
242 | |
243 | void init(uint16_t start = 0, uint16_t end = 0) { |
244 | fIntersection = fOffset.fP0; |
245 | fTValue = SK_ScalarMin; |
246 | fIndex = start; |
247 | fEnd = end; |
248 | } |
249 | |
250 | // special intersection check that looks for endpoint intersection |
251 | bool checkIntersection(const OffsetEdge* that, |
252 | SkPoint* p, SkScalar* s, SkScalar* t) { |
253 | if (this->fEnd == that->fIndex) { |
254 | SkPoint p1 = this->fOffset.fP0 + this->fOffset.fV; |
255 | if (SkPointPriv::EqualsWithinTolerance(p1, that->fOffset.fP0)) { |
256 | *p = p1; |
257 | *s = SK_Scalar1; |
258 | *t = 0; |
259 | return true; |
260 | } |
261 | } |
262 | |
263 | return compute_intersection(this->fOffset, that->fOffset, p, s, t); |
264 | } |
265 | |
    // Computes where the lines through the two segments cross, and then the "distance" from
    // that crossing to this segment. This is really a signed squared distance, where negative
    // means that the crossing lies inside this->fOffset.
269 | SkScalar computeCrossingDistance(const OffsetEdge* that) { |
270 | const OffsetSegment& s0 = this->fOffset; |
271 | const OffsetSegment& s1 = that->fOffset; |
272 | const SkVector& v0 = s0.fV; |
273 | const SkVector& v1 = s1.fV; |
274 | |
275 | SkScalar denom = v0.cross(v1); |
276 | if (SkScalarNearlyZero(denom, kCrossTolerance)) { |
277 | // segments are parallel |
278 | return SK_ScalarMax; |
279 | } |
280 | |
281 | SkVector w = s1.fP0 - s0.fP0; |
282 | SkScalar localS = w.cross(v1) / denom; |
283 | if (localS < 0) { |
284 | localS = -localS; |
285 | } else { |
286 | localS -= SK_Scalar1; |
287 | } |
288 | |
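        // localS now measures how far the line crossing lies outside this segment in parameter
        // space: positive if it falls before the start or past the end, negative if it falls
        // within the segment. Squaring while preserving the sign and scaling by |v0|^2 turns
        // that into a signed squared distance in point units.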
289 | localS *= SkScalarAbs(localS); |
290 | localS *= v0.dot(v0); |
291 | |
292 | return localS; |
293 | } |
294 | |
295 | }; |
296 | |
297 | static void remove_node(const OffsetEdge* node, OffsetEdge** head) { |
298 | // remove from linked list |
299 | node->fPrev->fNext = node->fNext; |
300 | node->fNext->fPrev = node->fPrev; |
301 | if (node == *head) { |
302 | *head = (node->fNext == node) ? nullptr : node->fNext; |
303 | } |
304 | } |
305 | |
306 | ////////////////////////////////////////////////////////////////////////////////// |
307 | |
308 | // The objective here is to inset all of the edges by the given distance, and then |
309 | // remove any invalid inset edges by detecting right-hand turns. In a ccw polygon, |
310 | // we should only be making left-hand turns (for cw polygons, we use the winding |
311 | // parameter to reverse this). We detect this by checking whether the second intersection |
312 | // on an edge is closer to its tail than the first one. |
313 | // |
314 | // We might also have the case that there is no intersection between two neighboring inset edges. |
315 | // In this case, one edge will lie to the right of the other and should be discarded along with |
316 | // its previous intersection (if any). |
317 | // |
318 | // Note: the assumption is that inputPolygon is convex and has no coincident points. |
319 | // |
320 | bool SkInsetConvexPolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize, |
321 | SkScalar inset, SkTDArray<SkPoint>* insetPolygon) { |
322 | if (inputPolygonSize < 3) { |
323 | return false; |
324 | } |
325 | |
326 | // restrict this to match other routines |
327 | // practically we don't want anything bigger than this anyway |
328 | if (inputPolygonSize > std::numeric_limits<uint16_t>::max()) { |
329 | return false; |
330 | } |
331 | |
332 | // can't inset by a negative or non-finite amount |
333 | if (inset < -SK_ScalarNearlyZero || !SkScalarIsFinite(inset)) { |
334 | return false; |
335 | } |
336 | |
337 | // insetting close to zero just returns the original poly |
338 | if (inset <= SK_ScalarNearlyZero) { |
339 | for (int i = 0; i < inputPolygonSize; ++i) { |
340 | *insetPolygon->push() = inputPolygonVerts[i]; |
341 | } |
342 | return true; |
343 | } |
344 | |
345 | // get winding direction |
346 | int winding = SkGetPolygonWinding(inputPolygonVerts, inputPolygonSize); |
347 | if (0 == winding) { |
348 | return false; |
349 | } |
350 | |
351 | // set up |
352 | SkAutoSTMalloc<64, OffsetEdge> edgeData(inputPolygonSize); |
353 | int prev = inputPolygonSize - 1; |
354 | for (int curr = 0; curr < inputPolygonSize; prev = curr, ++curr) { |
355 | int next = (curr + 1) % inputPolygonSize; |
356 | if (!inputPolygonVerts[curr].isFinite()) { |
357 | return false; |
358 | } |
359 | // check for convexity just to be sure |
360 | if (compute_side(inputPolygonVerts[prev], inputPolygonVerts[curr] - inputPolygonVerts[prev], |
361 | inputPolygonVerts[next])*winding < 0) { |
362 | return false; |
363 | } |
364 | SkVector v = inputPolygonVerts[next] - inputPolygonVerts[curr]; |
365 | SkVector perp = SkVector::Make(-v.fY, v.fX); |
366 | perp.setLength(inset*winding); |
367 | edgeData[curr].fPrev = &edgeData[prev]; |
368 | edgeData[curr].fNext = &edgeData[next]; |
369 | edgeData[curr].fOffset.fP0 = inputPolygonVerts[curr] + perp; |
370 | edgeData[curr].fOffset.fV = v; |
371 | edgeData[curr].init(); |
372 | } |
373 | |
374 | OffsetEdge* head = &edgeData[0]; |
375 | OffsetEdge* currEdge = head; |
376 | OffsetEdge* prevEdge = currEdge->fPrev; |
377 | int insetVertexCount = inputPolygonSize; |
378 | unsigned int iterations = 0; |
379 | unsigned int maxIterations = inputPolygonSize * inputPolygonSize; |
380 | while (head && prevEdge != currEdge) { |
381 | ++iterations; |
382 | // we should check each edge against each other edge at most once |
383 | if (iterations > maxIterations) { |
384 | return false; |
385 | } |
386 | |
387 | SkScalar s, t; |
388 | SkPoint intersection; |
389 | if (compute_intersection(prevEdge->fOffset, currEdge->fOffset, |
390 | &intersection, &s, &t)) { |
            // if the new intersection is further back along the previous inset edge than the prior one
392 | if (s < prevEdge->fTValue) { |
393 | // no point in considering this one again |
394 | remove_node(prevEdge, &head); |
395 | --insetVertexCount; |
396 | // go back one segment |
397 | prevEdge = prevEdge->fPrev; |
398 | // we've already considered this intersection, we're done |
399 | } else if (currEdge->fTValue > SK_ScalarMin && |
400 | SkPointPriv::EqualsWithinTolerance(intersection, |
401 | currEdge->fIntersection, |
402 | 1.0e-6f)) { |
403 | break; |
404 | } else { |
405 | // add intersection |
406 | currEdge->fIntersection = intersection; |
407 | currEdge->fTValue = t; |
408 | |
409 | // go to next segment |
410 | prevEdge = currEdge; |
411 | currEdge = currEdge->fNext; |
412 | } |
413 | } else { |
            // check whether prevEdge lies entirely to the right of currEdge
415 | int side = winding*compute_side(currEdge->fOffset.fP0, |
416 | currEdge->fOffset.fV, |
417 | prevEdge->fOffset.fP0); |
418 | if (side < 0 && |
419 | side == winding*compute_side(currEdge->fOffset.fP0, |
420 | currEdge->fOffset.fV, |
421 | prevEdge->fOffset.fP0 + prevEdge->fOffset.fV)) { |
422 | // no point in considering this one again |
423 | remove_node(prevEdge, &head); |
424 | --insetVertexCount; |
425 | // go back one segment |
426 | prevEdge = prevEdge->fPrev; |
427 | } else { |
428 | // move to next segment |
429 | remove_node(currEdge, &head); |
430 | --insetVertexCount; |
431 | currEdge = currEdge->fNext; |
432 | } |
433 | } |
434 | } |
435 | |
436 | // store all the valid intersections that aren't nearly coincident |
437 | // TODO: look at the main algorithm and see if we can detect these better |
438 | insetPolygon->reset(); |
439 | if (!head) { |
440 | return false; |
441 | } |
442 | |
443 | static constexpr SkScalar kCleanupTolerance = 0.01f; |
444 | if (insetVertexCount >= 0) { |
445 | insetPolygon->setReserve(insetVertexCount); |
446 | } |
447 | int currIndex = 0; |
448 | *insetPolygon->push() = head->fIntersection; |
449 | currEdge = head->fNext; |
450 | while (currEdge != head) { |
451 | if (!SkPointPriv::EqualsWithinTolerance(currEdge->fIntersection, |
452 | (*insetPolygon)[currIndex], |
453 | kCleanupTolerance)) { |
454 | *insetPolygon->push() = currEdge->fIntersection; |
455 | currIndex++; |
456 | } |
457 | currEdge = currEdge->fNext; |
458 | } |
459 | // make sure the first and last points aren't coincident |
460 | if (currIndex >= 1 && |
461 | SkPointPriv::EqualsWithinTolerance((*insetPolygon)[0], (*insetPolygon)[currIndex], |
462 | kCleanupTolerance)) { |
463 | insetPolygon->pop(); |
464 | } |
465 | |
466 | return SkIsConvexPolygon(insetPolygon->begin(), insetPolygon->count()); |
467 | } |
468 | |
469 | /////////////////////////////////////////////////////////////////////////////////////////// |
470 | |
471 | // compute the number of points needed for a circular join when offsetting a reflex vertex |
472 | bool SkComputeRadialSteps(const SkVector& v1, const SkVector& v2, SkScalar offset, |
473 | SkScalar* rotSin, SkScalar* rotCos, int* n) { |
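    // One arc segment for roughly every 4 pixels of arc length: the join's arc length is
    // |offset * theta|, and kRecipPixelsPerArcSegment (== 1/4) converts that to a step count.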
474 | const SkScalar kRecipPixelsPerArcSegment = 0.25f; |
475 | |
476 | SkScalar rCos = v1.dot(v2); |
477 | if (!SkScalarIsFinite(rCos)) { |
478 | return false; |
479 | } |
480 | SkScalar rSin = v1.cross(v2); |
481 | if (!SkScalarIsFinite(rSin)) { |
482 | return false; |
483 | } |
484 | SkScalar theta = SkScalarATan2(rSin, rCos); |
485 | |
486 | SkScalar floatSteps = SkScalarAbs(offset*theta*kRecipPixelsPerArcSegment); |
487 | // limit the number of steps to at most max uint16_t (that's all we can index) |
488 | // knock one value off the top to account for rounding |
489 | if (floatSteps >= std::numeric_limits<uint16_t>::max()) { |
490 | return false; |
491 | } |
492 | int steps = SkScalarRoundToInt(floatSteps); |
493 | |
494 | SkScalar dTheta = steps > 0 ? theta / steps : 0; |
495 | *rotSin = SkScalarSin(dTheta); |
496 | *rotCos = SkScalarCos(dTheta); |
497 | *n = steps; |
498 | return true; |
499 | } |
500 | |
501 | /////////////////////////////////////////////////////////////////////////////////////////// |
502 | |
503 | // a point is "left" to another if its x-coord is less, or if equal, its y-coord is greater |
504 | static bool left(const SkPoint& p0, const SkPoint& p1) { |
505 | return p0.fX < p1.fX || (!(p0.fX > p1.fX) && p0.fY > p1.fY); |
506 | } |
507 | |
508 | // a point is "right" to another if its x-coord is greater, or if equal, its y-coord is less |
509 | static bool right(const SkPoint& p0, const SkPoint& p1) { |
510 | return p0.fX > p1.fX || (!(p0.fX < p1.fX) && p0.fY < p1.fY); |
511 | } |
512 | |
513 | struct Vertex { |
514 | static bool Left(const Vertex& qv0, const Vertex& qv1) { |
515 | return left(qv0.fPosition, qv1.fPosition); |
516 | } |
517 | |
    // packed to fit into 16 bytes (four Vertices per typical 64-byte cache line)
519 | SkPoint fPosition; |
520 | uint16_t fIndex; // index in unsorted polygon |
521 | uint16_t fPrevIndex; // indices for previous and next vertex in unsorted polygon |
522 | uint16_t fNextIndex; |
523 | uint16_t fFlags; |
524 | }; |
525 | |
526 | enum VertexFlags { |
527 | kPrevLeft_VertexFlag = 0x1, |
528 | kNextLeft_VertexFlag = 0x2, |
529 | }; |
530 | |
531 | struct ActiveEdge { |
532 | ActiveEdge() : fChild{ nullptr, nullptr }, fAbove(nullptr), fBelow(nullptr), fRed(false) {} |
533 | ActiveEdge(const SkPoint& p0, const SkVector& v, uint16_t index0, uint16_t index1) |
534 | : fSegment({ p0, v }) |
535 | , fIndex0(index0) |
536 | , fIndex1(index1) |
537 | , fAbove(nullptr) |
538 | , fBelow(nullptr) |
539 | , fRed(true) { |
540 | fChild[0] = nullptr; |
541 | fChild[1] = nullptr; |
542 | } |
543 | |
544 | // Returns true if "this" is above "that", assuming this->p0 is to the left of that->p0 |
545 | // This is only used to verify the edgelist -- the actual test for insertion/deletion is much |
546 | // simpler because we can make certain assumptions then. |
547 | bool aboveIfLeft(const ActiveEdge* that) const { |
548 | const SkPoint& p0 = this->fSegment.fP0; |
549 | const SkPoint& q0 = that->fSegment.fP0; |
550 | SkASSERT(p0.fX <= q0.fX); |
551 | SkVector d = q0 - p0; |
552 | const SkVector& v = this->fSegment.fV; |
553 | const SkVector& w = that->fSegment.fV; |
554 | // The idea here is that if the vector between the origins of the two segments (d) |
555 | // rotates counterclockwise up to the vector representing the "this" segment (v), |
556 | // then we know that "this" is above "that". If the result is clockwise we say it's below. |
557 | if (this->fIndex0 != that->fIndex0) { |
558 | SkScalar cross = d.cross(v); |
559 | if (cross > kCrossTolerance) { |
560 | return true; |
561 | } else if (cross < -kCrossTolerance) { |
562 | return false; |
563 | } |
564 | } else if (this->fIndex1 == that->fIndex1) { |
565 | return false; |
566 | } |
        // At this point either the two origins are nearly equal, or the origin of "that" lies
        // on the line through "this". So then we try the same test for the vector from the tail
        // of "this" to the head of "that". Again, ccw means "this" is above "that".
570 | // d = that.P1 - this.P0 |
571 | // = that.fP0 + that.fV - this.fP0 |
572 | // = that.fP0 - this.fP0 + that.fV |
573 | // = old_d + that.fV |
574 | d += w; |
575 | SkScalar cross = d.cross(v); |
576 | if (cross > kCrossTolerance) { |
577 | return true; |
578 | } else if (cross < -kCrossTolerance) { |
579 | return false; |
580 | } |
581 | // If the previous check fails, the two segments are nearly collinear |
582 | // First check y-coord of first endpoints |
583 | if (p0.fX < q0.fX) { |
584 | return (p0.fY >= q0.fY); |
585 | } else if (p0.fY > q0.fY) { |
586 | return true; |
587 | } else if (p0.fY < q0.fY) { |
588 | return false; |
589 | } |
590 | // The first endpoints are the same, so check the other endpoint |
591 | SkPoint p1 = p0 + v; |
592 | SkPoint q1 = q0 + w; |
593 | if (p1.fX < q1.fX) { |
594 | return (p1.fY >= q1.fY); |
595 | } else { |
596 | return (p1.fY > q1.fY); |
597 | } |
598 | } |
599 | |
    // same as aboveIfLeft(), but without assuming which origin is leftmost
601 | bool above(const ActiveEdge* that) const { |
602 | const SkPoint& p0 = this->fSegment.fP0; |
603 | const SkPoint& q0 = that->fSegment.fP0; |
604 | if (right(p0, q0)) { |
605 | return !that->aboveIfLeft(this); |
606 | } else { |
607 | return this->aboveIfLeft(that); |
608 | } |
609 | } |
610 | |
611 | bool intersect(const SkPoint& q0, const SkVector& w, uint16_t index0, uint16_t index1) const { |
612 | // check first to see if these edges are neighbors in the polygon |
613 | if (this->fIndex0 == index0 || this->fIndex1 == index0 || |
614 | this->fIndex0 == index1 || this->fIndex1 == index1) { |
615 | return false; |
616 | } |
617 | |
618 | // We don't need the exact intersection point so we can do a simpler test here. |
619 | const SkPoint& p0 = this->fSegment.fP0; |
620 | const SkVector& v = this->fSegment.fV; |
621 | SkPoint p1 = p0 + v; |
622 | SkPoint q1 = q0 + w; |
623 | |
624 | // We assume some x-overlap due to how the edgelist works |
625 | // This allows us to simplify our test |
626 | // We need some slop here because storing the vector and recomputing the second endpoint |
        // doesn't necessarily give us the original result in floating point.
628 | // TODO: Store vector as double? Store endpoint as well? |
629 | SkASSERT(q0.fX <= p1.fX + SK_ScalarNearlyZero); |
630 | |
631 | // if each segment straddles the other (i.e., the endpoints have different sides) |
632 | // then they intersect |
633 | bool result; |
634 | if (p0.fX < q0.fX) { |
635 | if (q1.fX < p1.fX) { |
636 | result = (compute_side(p0, v, q0)*compute_side(p0, v, q1) < 0); |
637 | } else { |
638 | result = (compute_side(p0, v, q0)*compute_side(q0, w, p1) > 0); |
639 | } |
640 | } else { |
641 | if (p1.fX < q1.fX) { |
642 | result = (compute_side(q0, w, p0)*compute_side(q0, w, p1) < 0); |
643 | } else { |
644 | result = (compute_side(q0, w, p0)*compute_side(p0, v, q1) > 0); |
645 | } |
646 | } |
647 | return result; |
648 | } |
649 | |
650 | bool intersect(const ActiveEdge* edge) { |
651 | return this->intersect(edge->fSegment.fP0, edge->fSegment.fV, edge->fIndex0, edge->fIndex1); |
652 | } |
653 | |
654 | bool lessThan(const ActiveEdge* that) const { |
655 | SkASSERT(!this->above(this)); |
656 | SkASSERT(!that->above(that)); |
657 | SkASSERT(!(this->above(that) && that->above(this))); |
658 | return this->above(that); |
659 | } |
660 | |
661 | bool equals(uint16_t index0, uint16_t index1) const { |
662 | return (this->fIndex0 == index0 && this->fIndex1 == index1); |
663 | } |
664 | |
665 | OffsetSegment fSegment; |
666 | uint16_t fIndex0; // indices for previous and next vertex in polygon |
667 | uint16_t fIndex1; |
668 | ActiveEdge* fChild[2]; |
669 | ActiveEdge* fAbove; |
670 | ActiveEdge* fBelow; |
671 | int32_t fRed; |
672 | }; |
673 | |
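// The active edge list is the sweep line's "status": the set of polygon edges currently
// crossing the vertical sweep line, ordered from top to bottom. It's stored as a red-black
// tree so that insert/replace/remove are O(log n), with threaded fAbove/fBelow pointers so
// that each edge's immediate neighbors -- the only candidates it could newly intersect --
// are available in constant time.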
674 | class ActiveEdgeList { |
675 | public: |
676 | ActiveEdgeList(int maxEdges) { |
677 | fAllocation = (char*) sk_malloc_throw(sizeof(ActiveEdge)*maxEdges); |
678 | fCurrFree = 0; |
679 | fMaxFree = maxEdges; |
680 | } |
681 | ~ActiveEdgeList() { |
682 | fTreeHead.fChild[1] = nullptr; |
683 | sk_free(fAllocation); |
684 | } |
685 | |
686 | bool insert(const SkPoint& p0, const SkPoint& p1, uint16_t index0, uint16_t index1) { |
687 | SkVector v = p1 - p0; |
688 | if (!v.isFinite()) { |
689 | return false; |
690 | } |
691 | // empty tree case -- easy |
692 | if (!fTreeHead.fChild[1]) { |
693 | ActiveEdge* root = fTreeHead.fChild[1] = this->allocate(p0, v, index0, index1); |
694 | SkASSERT(root); |
695 | if (!root) { |
696 | return false; |
697 | } |
698 | root->fRed = false; |
699 | return true; |
700 | } |
701 | |
702 | // set up helpers |
703 | ActiveEdge* top = &fTreeHead; |
704 | ActiveEdge *grandparent = nullptr; |
705 | ActiveEdge *parent = nullptr; |
706 | ActiveEdge *curr = top->fChild[1]; |
707 | int dir = 0; |
        int last = 0;   // direction taken from grandparent to parent while descending
709 | // predecessor and successor, for intersection check |
710 | ActiveEdge* pred = nullptr; |
711 | ActiveEdge* succ = nullptr; |
712 | |
713 | // search down the tree |
714 | while (true) { |
715 | if (!curr) { |
716 | // check for intersection with predecessor and successor |
717 | if ((pred && pred->intersect(p0, v, index0, index1)) || |
718 | (succ && succ->intersect(p0, v, index0, index1))) { |
719 | return false; |
720 | } |
721 | // insert new node at bottom |
722 | parent->fChild[dir] = curr = this->allocate(p0, v, index0, index1); |
723 | SkASSERT(curr); |
724 | if (!curr) { |
725 | return false; |
726 | } |
727 | curr->fAbove = pred; |
728 | curr->fBelow = succ; |
729 | if (pred) { |
730 | pred->fBelow = curr; |
731 | } |
732 | if (succ) { |
733 | succ->fAbove = curr; |
734 | } |
735 | if (IsRed(parent)) { |
736 | int dir2 = (top->fChild[1] == grandparent); |
737 | if (curr == parent->fChild[last]) { |
738 | top->fChild[dir2] = SingleRotation(grandparent, !last); |
739 | } else { |
740 | top->fChild[dir2] = DoubleRotation(grandparent, !last); |
741 | } |
742 | } |
743 | break; |
744 | } else if (IsRed(curr->fChild[0]) && IsRed(curr->fChild[1])) { |
745 | // color flip |
746 | curr->fRed = true; |
747 | curr->fChild[0]->fRed = false; |
748 | curr->fChild[1]->fRed = false; |
749 | if (IsRed(parent)) { |
750 | int dir2 = (top->fChild[1] == grandparent); |
751 | if (curr == parent->fChild[last]) { |
752 | top->fChild[dir2] = SingleRotation(grandparent, !last); |
753 | } else { |
754 | top->fChild[dir2] = DoubleRotation(grandparent, !last); |
755 | } |
756 | } |
757 | } |
758 | |
759 | last = dir; |
760 | int side; |
761 | // check to see if segment is above or below |
762 | if (curr->fIndex0 == index0) { |
763 | side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p1); |
764 | } else { |
765 | side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p0); |
766 | } |
767 | if (0 == side) { |
768 | return false; |
769 | } |
770 | dir = (side < 0); |
771 | |
772 | if (0 == dir) { |
773 | succ = curr; |
774 | } else { |
775 | pred = curr; |
776 | } |
777 | |
778 | // update helpers |
779 | if (grandparent) { |
780 | top = grandparent; |
781 | } |
782 | grandparent = parent; |
783 | parent = curr; |
784 | curr = curr->fChild[dir]; |
785 | } |
786 | |
787 | // update root and make it black |
788 | fTreeHead.fChild[1]->fRed = false; |
789 | |
790 | SkDEBUGCODE(VerifyTree(fTreeHead.fChild[1])); |
791 | |
792 | return true; |
793 | } |
794 | |
795 | // replaces edge p0p1 with p1p2 |
796 | bool replace(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2, |
797 | uint16_t index0, uint16_t index1, uint16_t index2) { |
798 | if (!fTreeHead.fChild[1]) { |
799 | return false; |
800 | } |
801 | |
802 | SkVector v = p2 - p1; |
803 | ActiveEdge* curr = &fTreeHead; |
804 | ActiveEdge* found = nullptr; |
805 | int dir = 1; |
806 | |
807 | // search |
808 | while (curr->fChild[dir] != nullptr) { |
809 | // update helpers |
810 | curr = curr->fChild[dir]; |
811 | // save found node |
812 | if (curr->equals(index0, index1)) { |
813 | found = curr; |
814 | break; |
815 | } else { |
816 | // check to see if segment is above or below |
817 | int side; |
818 | if (curr->fIndex1 == index1) { |
819 | side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p0); |
820 | } else { |
821 | side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p1); |
822 | } |
823 | if (0 == side) { |
824 | return false; |
825 | } |
826 | dir = (side < 0); |
827 | } |
828 | } |
829 | |
830 | if (!found) { |
831 | return false; |
832 | } |
833 | |
834 | // replace if found |
835 | ActiveEdge* pred = found->fAbove; |
836 | ActiveEdge* succ = found->fBelow; |
837 | // check deletion and insert intersection cases |
838 | if (pred && (pred->intersect(found) || pred->intersect(p1, v, index1, index2))) { |
839 | return false; |
840 | } |
841 | if (succ && (succ->intersect(found) || succ->intersect(p1, v, index1, index2))) { |
842 | return false; |
843 | } |
844 | found->fSegment.fP0 = p1; |
845 | found->fSegment.fV = v; |
846 | found->fIndex0 = index1; |
847 | found->fIndex1 = index2; |
848 | // above and below should stay the same |
849 | |
850 | SkDEBUGCODE(VerifyTree(fTreeHead.fChild[1])); |
851 | |
852 | return true; |
853 | } |
854 | |
855 | bool remove(const SkPoint& p0, const SkPoint& p1, uint16_t index0, uint16_t index1) { |
856 | if (!fTreeHead.fChild[1]) { |
857 | return false; |
858 | } |
859 | |
860 | ActiveEdge* curr = &fTreeHead; |
861 | ActiveEdge* parent = nullptr; |
862 | ActiveEdge* grandparent = nullptr; |
863 | ActiveEdge* found = nullptr; |
864 | int dir = 1; |
865 | |
866 | // search and push a red node down |
867 | while (curr->fChild[dir] != nullptr) { |
868 | int last = dir; |
869 | |
870 | // update helpers |
871 | grandparent = parent; |
872 | parent = curr; |
873 | curr = curr->fChild[dir]; |
874 | // save found node |
875 | if (curr->equals(index0, index1)) { |
876 | found = curr; |
877 | dir = 0; |
878 | } else { |
879 | // check to see if segment is above or below |
880 | int side; |
881 | if (curr->fIndex1 == index1) { |
882 | side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p0); |
883 | } else { |
884 | side = compute_side(curr->fSegment.fP0, curr->fSegment.fV, p1); |
885 | } |
886 | if (0 == side) { |
887 | return false; |
888 | } |
889 | dir = (side < 0); |
890 | } |
891 | |
892 | // push the red node down |
893 | if (!IsRed(curr) && !IsRed(curr->fChild[dir])) { |
894 | if (IsRed(curr->fChild[!dir])) { |
895 | parent = parent->fChild[last] = SingleRotation(curr, dir); |
896 | } else { |
897 | ActiveEdge *s = parent->fChild[!last]; |
898 | |
                    if (s != nullptr) {
900 | if (!IsRed(s->fChild[!last]) && !IsRed(s->fChild[last])) { |
901 | // color flip |
902 | parent->fRed = false; |
903 | s->fRed = true; |
904 | curr->fRed = true; |
905 | } else { |
906 | int dir2 = (grandparent->fChild[1] == parent); |
907 | |
908 | if (IsRed(s->fChild[last])) { |
909 | grandparent->fChild[dir2] = DoubleRotation(parent, last); |
910 | } else if (IsRed(s->fChild[!last])) { |
911 | grandparent->fChild[dir2] = SingleRotation(parent, last); |
912 | } |
913 | |
914 | // ensure correct coloring |
915 | curr->fRed = grandparent->fChild[dir2]->fRed = true; |
916 | grandparent->fChild[dir2]->fChild[0]->fRed = false; |
917 | grandparent->fChild[dir2]->fChild[1]->fRed = false; |
918 | } |
919 | } |
920 | } |
921 | } |
922 | } |
923 | |
924 | // replace and remove if found |
925 | if (found) { |
926 | ActiveEdge* pred = found->fAbove; |
927 | ActiveEdge* succ = found->fBelow; |
928 | if ((pred && pred->intersect(found)) || (succ && succ->intersect(found))) { |
929 | return false; |
930 | } |
931 | if (found != curr) { |
932 | found->fSegment = curr->fSegment; |
933 | found->fIndex0 = curr->fIndex0; |
934 | found->fIndex1 = curr->fIndex1; |
935 | found->fAbove = curr->fAbove; |
936 | pred = found->fAbove; |
937 | // we don't need to set found->fBelow here |
938 | } else { |
939 | if (succ) { |
940 | succ->fAbove = pred; |
941 | } |
942 | } |
943 | if (pred) { |
944 | pred->fBelow = curr->fBelow; |
945 | } |
946 | parent->fChild[parent->fChild[1] == curr] = curr->fChild[!curr->fChild[0]]; |
947 | |
948 | // no need to delete |
949 | curr->fAbove = reinterpret_cast<ActiveEdge*>(0xdeadbeefll); |
950 | curr->fBelow = reinterpret_cast<ActiveEdge*>(0xdeadbeefll); |
951 | if (fTreeHead.fChild[1]) { |
952 | fTreeHead.fChild[1]->fRed = false; |
953 | } |
954 | } |
955 | |
956 | // update root and make it black |
957 | if (fTreeHead.fChild[1]) { |
958 | fTreeHead.fChild[1]->fRed = false; |
959 | } |
960 | |
961 | SkDEBUGCODE(VerifyTree(fTreeHead.fChild[1])); |
962 | |
963 | return true; |
964 | } |
965 | |
966 | private: |
    // allocator -- placement-news an ActiveEdge into the preallocated block
    ActiveEdge* allocate(const SkPoint& p0, const SkVector& v, uint16_t index0, uint16_t index1) {
969 | if (fCurrFree >= fMaxFree) { |
970 | return nullptr; |
971 | } |
972 | char* bytes = fAllocation + sizeof(ActiveEdge)*fCurrFree; |
973 | ++fCurrFree; |
        return new(bytes) ActiveEdge(p0, v, index0, index1);
975 | } |
976 | |
977 | /////////////////////////////////////////////////////////////////////////////////// |
978 | // Red-black tree methods |
979 | /////////////////////////////////////////////////////////////////////////////////// |
980 | static bool IsRed(const ActiveEdge* node) { |
981 | return node && node->fRed; |
982 | } |
983 | |
984 | static ActiveEdge* SingleRotation(ActiveEdge* node, int dir) { |
985 | ActiveEdge* tmp = node->fChild[!dir]; |
986 | |
987 | node->fChild[!dir] = tmp->fChild[dir]; |
988 | tmp->fChild[dir] = node; |
989 | |
990 | node->fRed = true; |
991 | tmp->fRed = false; |
992 | |
993 | return tmp; |
994 | } |
995 | |
996 | static ActiveEdge* DoubleRotation(ActiveEdge* node, int dir) { |
997 | node->fChild[!dir] = SingleRotation(node->fChild[!dir], !dir); |
998 | |
999 | return SingleRotation(node, dir); |
1000 | } |
1001 | |
1002 | // returns black link count |
1003 | static int VerifyTree(const ActiveEdge* tree) { |
1004 | if (!tree) { |
1005 | return 1; |
1006 | } |
1007 | |
1008 | const ActiveEdge* left = tree->fChild[0]; |
1009 | const ActiveEdge* right = tree->fChild[1]; |
1010 | |
1011 | // no consecutive red links |
1012 | if (IsRed(tree) && (IsRed(left) || IsRed(right))) { |
1013 | SkASSERT(false); |
1014 | return 0; |
1015 | } |
1016 | |
1017 | // check secondary links |
1018 | if (tree->fAbove) { |
1019 | SkASSERT(tree->fAbove->fBelow == tree); |
1020 | SkASSERT(tree->fAbove->lessThan(tree)); |
1021 | } |
1022 | if (tree->fBelow) { |
1023 | SkASSERT(tree->fBelow->fAbove == tree); |
1024 | SkASSERT(tree->lessThan(tree->fBelow)); |
1025 | } |
1026 | |
1027 | // violates binary tree order |
1028 | if ((left && tree->lessThan(left)) || (right && right->lessThan(tree))) { |
1029 | SkASSERT(false); |
1030 | return 0; |
1031 | } |
1032 | |
1033 | int leftCount = VerifyTree(left); |
1034 | int rightCount = VerifyTree(right); |
1035 | |
1036 | // return black link count |
1037 | if (leftCount != 0 && rightCount != 0) { |
1038 | // black height mismatch |
1039 | if (leftCount != rightCount) { |
1040 | SkASSERT(false); |
1041 | return 0; |
1042 | } |
1043 | return IsRed(tree) ? leftCount : leftCount + 1; |
1044 | } else { |
1045 | return 0; |
1046 | } |
1047 | } |
1048 | |
1049 | ActiveEdge fTreeHead; |
1050 | char* fAllocation; |
1051 | int fCurrFree; |
1052 | int fMaxFree; |
1053 | }; |
1054 | |
1055 | // Here we implement a sweep line algorithm to determine whether the provided points |
1056 | // represent a simple polygon, i.e., the polygon is non-self-intersecting. |
1057 | // We first insert the vertices into a priority queue sorting horizontally from left to right. |
1058 | // Then as we pop the vertices from the queue we generate events which indicate that an edge |
1059 | // should be added or removed from an edge list. If any intersections are detected in the edge |
1060 | // list, then we know the polygon is self-intersecting and hence not simple. |
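// (This is essentially the classic Shamos-Hoey sweep: O(n log n) overall, stopping as soon as
// an intersection or degeneracy is found.)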
1061 | bool SkIsSimplePolygon(const SkPoint* polygon, int polygonSize) { |
1062 | if (polygonSize < 3) { |
1063 | return false; |
1064 | } |
1065 | |
1066 | // If it's convex, it's simple |
1067 | if (SkIsConvexPolygon(polygon, polygonSize)) { |
1068 | return true; |
1069 | } |
1070 | |
1071 | // practically speaking, it takes too long to process large polygons |
1072 | if (polygonSize > 2048) { |
1073 | return false; |
1074 | } |
1075 | |
1076 | SkTDPQueue <Vertex, Vertex::Left> vertexQueue(polygonSize); |
1077 | for (int i = 0; i < polygonSize; ++i) { |
1078 | Vertex newVertex; |
1079 | if (!polygon[i].isFinite()) { |
1080 | return false; |
1081 | } |
1082 | newVertex.fPosition = polygon[i]; |
1083 | newVertex.fIndex = i; |
1084 | newVertex.fPrevIndex = (i - 1 + polygonSize) % polygonSize; |
1085 | newVertex.fNextIndex = (i + 1) % polygonSize; |
1086 | newVertex.fFlags = 0; |
1087 | if (left(polygon[newVertex.fPrevIndex], polygon[i])) { |
1088 | newVertex.fFlags |= kPrevLeft_VertexFlag; |
1089 | } |
1090 | if (left(polygon[newVertex.fNextIndex], polygon[i])) { |
1091 | newVertex.fFlags |= kNextLeft_VertexFlag; |
1092 | } |
1093 | vertexQueue.insert(newVertex); |
1094 | } |
1095 | |
1096 | // pop each vertex from the queue and generate events depending on |
1097 | // where it lies relative to its neighboring edges |
1098 | ActiveEdgeList sweepLine(polygonSize); |
1099 | while (vertexQueue.count() > 0) { |
1100 | const Vertex& v = vertexQueue.peek(); |
1101 | |
1102 | // both to the right -- insert both |
1103 | if (v.fFlags == 0) { |
1104 | if (!sweepLine.insert(v.fPosition, polygon[v.fPrevIndex], v.fIndex, v.fPrevIndex)) { |
1105 | break; |
1106 | } |
1107 | if (!sweepLine.insert(v.fPosition, polygon[v.fNextIndex], v.fIndex, v.fNextIndex)) { |
1108 | break; |
1109 | } |
1110 | // both to the left -- remove both |
1111 | } else if (v.fFlags == (kPrevLeft_VertexFlag | kNextLeft_VertexFlag)) { |
1112 | if (!sweepLine.remove(polygon[v.fPrevIndex], v.fPosition, v.fPrevIndex, v.fIndex)) { |
1113 | break; |
1114 | } |
1115 | if (!sweepLine.remove(polygon[v.fNextIndex], v.fPosition, v.fNextIndex, v.fIndex)) { |
1116 | break; |
1117 | } |
1118 | // one to left and right -- replace one with another |
1119 | } else { |
1120 | if (v.fFlags & kPrevLeft_VertexFlag) { |
1121 | if (!sweepLine.replace(polygon[v.fPrevIndex], v.fPosition, polygon[v.fNextIndex], |
1122 | v.fPrevIndex, v.fIndex, v.fNextIndex)) { |
1123 | break; |
1124 | } |
1125 | } else { |
1126 | SkASSERT(v.fFlags & kNextLeft_VertexFlag); |
1127 | if (!sweepLine.replace(polygon[v.fNextIndex], v.fPosition, polygon[v.fPrevIndex], |
1128 | v.fNextIndex, v.fIndex, v.fPrevIndex)) { |
1129 | break; |
1130 | } |
1131 | } |
1132 | } |
1133 | |
1134 | vertexQueue.pop(); |
1135 | } |
1136 | |
1137 | return (vertexQueue.count() == 0); |
1138 | } |
1139 | |
1140 | /////////////////////////////////////////////////////////////////////////////////////////// |
1141 | |
1142 | // helper function for SkOffsetSimplePolygon |
1143 | static void setup_offset_edge(OffsetEdge* currEdge, |
1144 | const SkPoint& endpoint0, const SkPoint& endpoint1, |
1145 | uint16_t startIndex, uint16_t endIndex) { |
1146 | currEdge->fOffset.fP0 = endpoint0; |
1147 | currEdge->fOffset.fV = endpoint1 - endpoint0; |
1148 | currEdge->init(startIndex, endIndex); |
1149 | } |
1150 | |
1151 | static bool is_reflex_vertex(const SkPoint* inputPolygonVerts, int winding, SkScalar offset, |
1152 | uint16_t prevIndex, uint16_t currIndex, uint16_t nextIndex) { |
1153 | int side = compute_side(inputPolygonVerts[prevIndex], |
1154 | inputPolygonVerts[currIndex] - inputPolygonVerts[prevIndex], |
1155 | inputPolygonVerts[nextIndex]); |
1156 | // if reflex point, we need to add extra edges |
1157 | return (side*winding*offset < 0); |
1158 | } |
1159 | |
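// Offset a simple polygon: each edge is shifted along its normal by 'offset' units (via
// compute_offset_vector), reflex vertices get a fan of extra edges approximating a circular
// join (via SkComputeRadialSteps), and the resulting edge loop is then clipped using the same
// intersection walk as SkInsetConvexPolygon above. A positive offset insets the polygon; a
// negative offset outsets it.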
1160 | bool SkOffsetSimplePolygon(const SkPoint* inputPolygonVerts, int inputPolygonSize, |
1161 | const SkRect& bounds, SkScalar offset, |
1162 | SkTDArray<SkPoint>* offsetPolygon, SkTDArray<int>* polygonIndices) { |
1163 | if (inputPolygonSize < 3) { |
1164 | return false; |
1165 | } |
1166 | |
1167 | // need to be able to represent all the vertices in the 16-bit indices |
1168 | if (inputPolygonSize >= std::numeric_limits<uint16_t>::max()) { |
1169 | return false; |
1170 | } |
1171 | |
1172 | if (!SkScalarIsFinite(offset)) { |
1173 | return false; |
1174 | } |
1175 | |
    // can't inset by more than half the width or height of the polygon's bounds
1177 | if (offset > std::min(SkTAbs(SK_ScalarHalf*bounds.width()), |
1178 | SkTAbs(SK_ScalarHalf*bounds.height()))) { |
1179 | return false; |
1180 | } |
1181 | |
1182 | // offsetting close to zero just returns the original poly |
1183 | if (SkScalarNearlyZero(offset)) { |
1184 | for (int i = 0; i < inputPolygonSize; ++i) { |
1185 | *offsetPolygon->push() = inputPolygonVerts[i]; |
1186 | if (polygonIndices) { |
1187 | *polygonIndices->push() = i; |
1188 | } |
1189 | } |
1190 | return true; |
1191 | } |
1192 | |
1193 | // get winding direction |
1194 | int winding = SkGetPolygonWinding(inputPolygonVerts, inputPolygonSize); |
1195 | if (0 == winding) { |
1196 | return false; |
1197 | } |
1198 | |
1199 | // build normals |
1200 | SkAutoSTMalloc<64, SkVector> normals(inputPolygonSize); |
1201 | unsigned int numEdges = 0; |
1202 | for (int currIndex = 0, prevIndex = inputPolygonSize - 1; |
1203 | currIndex < inputPolygonSize; |
1204 | prevIndex = currIndex, ++currIndex) { |
1205 | if (!inputPolygonVerts[currIndex].isFinite()) { |
1206 | return false; |
1207 | } |
1208 | int nextIndex = (currIndex + 1) % inputPolygonSize; |
1209 | if (!compute_offset_vector(inputPolygonVerts[currIndex], inputPolygonVerts[nextIndex], |
1210 | offset, winding, &normals[currIndex])) { |
1211 | return false; |
1212 | } |
1213 | if (currIndex > 0) { |
1214 | // if reflex point, we need to add extra edges |
1215 | if (is_reflex_vertex(inputPolygonVerts, winding, offset, |
1216 | prevIndex, currIndex, nextIndex)) { |
1217 | SkScalar rotSin, rotCos; |
1218 | int numSteps; |
1219 | if (!SkComputeRadialSteps(normals[prevIndex], normals[currIndex], offset, |
1220 | &rotSin, &rotCos, &numSteps)) { |
1221 | return false; |
1222 | } |
1223 | numEdges += std::max(numSteps, 1); |
1224 | } |
1225 | } |
1226 | numEdges++; |
1227 | } |
1228 | // finish up the edge counting |
1229 | if (is_reflex_vertex(inputPolygonVerts, winding, offset, inputPolygonSize-1, 0, 1)) { |
1230 | SkScalar rotSin, rotCos; |
1231 | int numSteps; |
1232 | if (!SkComputeRadialSteps(normals[inputPolygonSize-1], normals[0], offset, |
1233 | &rotSin, &rotCos, &numSteps)) { |
1234 | return false; |
1235 | } |
1236 | numEdges += std::max(numSteps, 1); |
1237 | } |
1238 | |
1239 | // Make sure we don't overflow the max array count. |
1240 | // We shouldn't overflow numEdges, as SkComputeRadialSteps returns a max of 2^16-1, |
1241 | // and we have a max of 2^16-1 original vertices. |
1242 | if (numEdges > (unsigned int)std::numeric_limits<int32_t>::max()) { |
1243 | return false; |
1244 | } |
1245 | |
1246 | // build initial offset edge list |
1247 | SkSTArray<64, OffsetEdge> edgeData(numEdges); |
1248 | OffsetEdge* prevEdge = nullptr; |
1249 | for (int currIndex = 0, prevIndex = inputPolygonSize - 1; |
1250 | currIndex < inputPolygonSize; |
1251 | prevIndex = currIndex, ++currIndex) { |
1252 | int nextIndex = (currIndex + 1) % inputPolygonSize; |
1253 | // if reflex point, fill in curve |
1254 | if (is_reflex_vertex(inputPolygonVerts, winding, offset, |
1255 | prevIndex, currIndex, nextIndex)) { |
1256 | SkScalar rotSin, rotCos; |
1257 | int numSteps; |
1258 | SkVector prevNormal = normals[prevIndex]; |
1259 | if (!SkComputeRadialSteps(prevNormal, normals[currIndex], offset, |
1260 | &rotSin, &rotCos, &numSteps)) { |
1261 | return false; |
1262 | } |
1263 | auto currEdge = edgeData.push_back_n(std::max(numSteps, 1)); |
1264 | for (int i = 0; i < numSteps - 1; ++i) { |
1265 | SkVector currNormal = SkVector::Make(prevNormal.fX*rotCos - prevNormal.fY*rotSin, |
1266 | prevNormal.fY*rotCos + prevNormal.fX*rotSin); |
1267 | setup_offset_edge(currEdge, |
1268 | inputPolygonVerts[currIndex] + prevNormal, |
1269 | inputPolygonVerts[currIndex] + currNormal, |
1270 | currIndex, currIndex); |
1271 | prevNormal = currNormal; |
1272 | currEdge->fPrev = prevEdge; |
1273 | if (prevEdge) { |
1274 | prevEdge->fNext = currEdge; |
1275 | } |
1276 | prevEdge = currEdge; |
1277 | ++currEdge; |
1278 | } |
1279 | setup_offset_edge(currEdge, |
1280 | inputPolygonVerts[currIndex] + prevNormal, |
1281 | inputPolygonVerts[currIndex] + normals[currIndex], |
1282 | currIndex, currIndex); |
1283 | currEdge->fPrev = prevEdge; |
1284 | if (prevEdge) { |
1285 | prevEdge->fNext = currEdge; |
1286 | } |
1287 | prevEdge = currEdge; |
1288 | } |
1289 | |
1290 | // Add the edge |
1291 | auto currEdge = edgeData.push_back_n(1); |
1292 | setup_offset_edge(currEdge, |
1293 | inputPolygonVerts[currIndex] + normals[currIndex], |
1294 | inputPolygonVerts[nextIndex] + normals[currIndex], |
1295 | currIndex, nextIndex); |
1296 | currEdge->fPrev = prevEdge; |
1297 | if (prevEdge) { |
1298 | prevEdge->fNext = currEdge; |
1299 | } |
1300 | prevEdge = currEdge; |
1301 | } |
1302 | // close up the linked list |
1303 | SkASSERT(prevEdge); |
1304 | prevEdge->fNext = &edgeData[0]; |
1305 | edgeData[0].fPrev = prevEdge; |
1306 | |
1307 | // now clip edges |
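    // This is the same intersection walk as in SkInsetConvexPolygon, except that when two
    // neighboring offset edges fail to intersect we decide which one to drop by minimizing
    // the crossing distance (and by preferring to stay on the same contour).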
1308 | SkASSERT(edgeData.count() == (int)numEdges); |
1309 | auto head = &edgeData[0]; |
1310 | auto currEdge = head; |
1311 | unsigned int offsetVertexCount = numEdges; |
1312 | unsigned long long iterations = 0; |
1313 | unsigned long long maxIterations = (unsigned long long)(numEdges) * numEdges; |
1314 | while (head && prevEdge != currEdge && offsetVertexCount > 0) { |
1315 | ++iterations; |
1316 | // we should check each edge against each other edge at most once |
1317 | if (iterations > maxIterations) { |
1318 | return false; |
1319 | } |
1320 | |
1321 | SkScalar s, t; |
1322 | SkPoint intersection; |
1323 | if (prevEdge->checkIntersection(currEdge, &intersection, &s, &t)) { |
            // if the new intersection is further back along the previous inset edge than the prior one
1325 | if (s < prevEdge->fTValue) { |
1326 | // no point in considering this one again |
1327 | remove_node(prevEdge, &head); |
1328 | --offsetVertexCount; |
1329 | // go back one segment |
1330 | prevEdge = prevEdge->fPrev; |
1331 | // we've already considered this intersection, we're done |
1332 | } else if (currEdge->fTValue > SK_ScalarMin && |
1333 | SkPointPriv::EqualsWithinTolerance(intersection, |
1334 | currEdge->fIntersection, |
1335 | 1.0e-6f)) { |
1336 | break; |
1337 | } else { |
1338 | // add intersection |
1339 | currEdge->fIntersection = intersection; |
1340 | currEdge->fTValue = t; |
1341 | currEdge->fIndex = prevEdge->fEnd; |
1342 | |
1343 | // go to next segment |
1344 | prevEdge = currEdge; |
1345 | currEdge = currEdge->fNext; |
1346 | } |
1347 | } else { |
1348 | // If there is no intersection, we want to minimize the distance between |
1349 | // the point where the segment lines cross and the segments themselves. |
1350 | OffsetEdge* prevPrevEdge = prevEdge->fPrev; |
1351 | OffsetEdge* currNextEdge = currEdge->fNext; |
1352 | SkScalar dist0 = currEdge->computeCrossingDistance(prevPrevEdge); |
1353 | SkScalar dist1 = prevEdge->computeCrossingDistance(currNextEdge); |
1354 | // if both lead to direct collision |
1355 | if (dist0 < 0 && dist1 < 0) { |
1356 | // check first to see if either represent parts of one contour |
1357 | SkPoint p1 = prevPrevEdge->fOffset.fP0 + prevPrevEdge->fOffset.fV; |
1358 | bool prevSameContour = SkPointPriv::EqualsWithinTolerance(p1, |
1359 | prevEdge->fOffset.fP0); |
1360 | p1 = currEdge->fOffset.fP0 + currEdge->fOffset.fV; |
1361 | bool currSameContour = SkPointPriv::EqualsWithinTolerance(p1, |
1362 | currNextEdge->fOffset.fP0); |
1363 | |
1364 | // want to step along contour to find intersections rather than jump to new one |
1365 | if (currSameContour && !prevSameContour) { |
1366 | remove_node(currEdge, &head); |
1367 | currEdge = currNextEdge; |
1368 | --offsetVertexCount; |
1369 | continue; |
1370 | } else if (prevSameContour && !currSameContour) { |
1371 | remove_node(prevEdge, &head); |
1372 | prevEdge = prevPrevEdge; |
1373 | --offsetVertexCount; |
1374 | continue; |
1375 | } |
1376 | } |
1377 | |
1378 | // otherwise minimize collision distance along segment |
1379 | if (dist0 < dist1) { |
1380 | remove_node(prevEdge, &head); |
1381 | prevEdge = prevPrevEdge; |
1382 | } else { |
1383 | remove_node(currEdge, &head); |
1384 | currEdge = currNextEdge; |
1385 | } |
1386 | --offsetVertexCount; |
1387 | } |
1388 | } |
1389 | |
1390 | // store all the valid intersections that aren't nearly coincident |
1391 | // TODO: look at the main algorithm and see if we can detect these better |
1392 | offsetPolygon->reset(); |
1393 | if (!head || offsetVertexCount == 0 || |
1394 | offsetVertexCount >= std::numeric_limits<uint16_t>::max()) { |
1395 | return false; |
1396 | } |
1397 | |
1398 | static constexpr SkScalar kCleanupTolerance = 0.01f; |
1399 | offsetPolygon->setReserve(offsetVertexCount); |
1400 | int currIndex = 0; |
1401 | *offsetPolygon->push() = head->fIntersection; |
1402 | if (polygonIndices) { |
1403 | *polygonIndices->push() = head->fIndex; |
1404 | } |
1405 | currEdge = head->fNext; |
1406 | while (currEdge != head) { |
1407 | if (!SkPointPriv::EqualsWithinTolerance(currEdge->fIntersection, |
1408 | (*offsetPolygon)[currIndex], |
1409 | kCleanupTolerance)) { |
1410 | *offsetPolygon->push() = currEdge->fIntersection; |
1411 | if (polygonIndices) { |
1412 | *polygonIndices->push() = currEdge->fIndex; |
1413 | } |
1414 | currIndex++; |
1415 | } |
1416 | currEdge = currEdge->fNext; |
1417 | } |
1418 | // make sure the first and last points aren't coincident |
1419 | if (currIndex >= 1 && |
1420 | SkPointPriv::EqualsWithinTolerance((*offsetPolygon)[0], (*offsetPolygon)[currIndex], |
1421 | kCleanupTolerance)) { |
1422 | offsetPolygon->pop(); |
1423 | if (polygonIndices) { |
1424 | polygonIndices->pop(); |
1425 | } |
1426 | } |
1427 | |
1428 | // check winding of offset polygon (it should be same as the original polygon) |
    int offsetWinding = SkGetPolygonWinding(offsetPolygon->begin(), offsetPolygon->count());
1430 | |
1431 | return (winding*offsetWinding > 0 && |
1432 | SkIsSimplePolygon(offsetPolygon->begin(), offsetPolygon->count())); |
1433 | } |
1434 | |
1435 | ////////////////////////////////////////////////////////////////////////////////////////// |
1436 | |
1437 | struct TriangulationVertex { |
1438 | SK_DECLARE_INTERNAL_LLIST_INTERFACE(TriangulationVertex); |
1439 | |
1440 | enum class VertexType { kConvex, kReflex }; |
1441 | |
1442 | SkPoint fPosition; |
1443 | VertexType fVertexType; |
1444 | uint16_t fIndex; |
1445 | uint16_t fPrevIndex; |
1446 | uint16_t fNextIndex; |
1447 | }; |
1448 | |
1449 | static void compute_triangle_bounds(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2, |
1450 | SkRect* bounds) { |
1451 | Sk4s min, max; |
1452 | min = max = Sk4s(p0.fX, p0.fY, p0.fX, p0.fY); |
1453 | Sk4s xy(p1.fX, p1.fY, p2.fX, p2.fY); |
1454 | min = Sk4s::Min(min, xy); |
1455 | max = Sk4s::Max(max, xy); |
1456 | bounds->setLTRB(std::min(min[0], min[2]), std::min(min[1], min[3]), |
1457 | std::max(max[0], max[2]), std::max(max[1], max[3])); |
1458 | } |
1459 | |
1460 | // test to see if point p is in triangle p0p1p2. |
1461 | // for now assuming strictly inside -- if on the edge it's outside |
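// A point is strictly inside iff it lies on the same side (relative to the triangle's winding,
// captured in n) of all three edges.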
1462 | static bool point_in_triangle(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2, |
1463 | const SkPoint& p) { |
1464 | SkVector v0 = p1 - p0; |
1465 | SkVector v1 = p2 - p1; |
1466 | SkScalar n = v0.cross(v1); |
1467 | |
1468 | SkVector w0 = p - p0; |
1469 | if (n*v0.cross(w0) < SK_ScalarNearlyZero) { |
1470 | return false; |
1471 | } |
1472 | |
1473 | SkVector w1 = p - p1; |
1474 | if (n*v1.cross(w1) < SK_ScalarNearlyZero) { |
1475 | return false; |
1476 | } |
1477 | |
1478 | SkVector v2 = p0 - p2; |
1479 | SkVector w2 = p - p2; |
1480 | if (n*v2.cross(w2) < SK_ScalarNearlyZero) { |
1481 | return false; |
1482 | } |
1483 | |
1484 | return true; |
1485 | } |
1486 | |
1487 | // Data structure to track reflex vertices and check whether any are inside a given triangle |
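// Reflex vertices are bucketed into a uniform grid laid over the polygon's bounds, so that
// checkTriangle() only needs to test the reflex vertices in the grid cells overlapped by the
// triangle's bounding box rather than all of them.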
1488 | class ReflexHash { |
1489 | public: |
1490 | bool init(const SkRect& bounds, int vertexCount) { |
1491 | fBounds = bounds; |
1492 | fNumVerts = 0; |
1493 | SkScalar width = bounds.width(); |
1494 | SkScalar height = bounds.height(); |
1495 | if (!SkScalarIsFinite(width) || !SkScalarIsFinite(height)) { |
1496 | return false; |
1497 | } |
1498 | |
1499 | // We want vertexCount grid cells, roughly distributed to match the bounds ratio |
1500 | SkScalar hCount = SkScalarSqrt(sk_ieee_float_divide(vertexCount*width, height)); |
1501 | if (!SkScalarIsFinite(hCount)) { |
1502 | return false; |
1503 | } |
1504 | fHCount = std::max(std::min(SkScalarRoundToInt(hCount), vertexCount), 1); |
1505 | fVCount = vertexCount/fHCount; |
1506 | fGridConversion.set(sk_ieee_float_divide(fHCount - 0.001f, width), |
1507 | sk_ieee_float_divide(fVCount - 0.001f, height)); |
1508 | if (!fGridConversion.isFinite()) { |
1509 | return false; |
1510 | } |
1511 | |
1512 | fGrid.setCount(fHCount*fVCount); |
1513 | for (int i = 0; i < fGrid.count(); ++i) { |
1514 | fGrid[i].reset(); |
1515 | } |
1516 | |
1517 | return true; |
1518 | } |
1519 | |
1520 | void add(TriangulationVertex* v) { |
1521 | int index = hash(v); |
1522 | fGrid[index].addToTail(v); |
1523 | ++fNumVerts; |
1524 | } |
1525 | |
1526 | void remove(TriangulationVertex* v) { |
1527 | int index = hash(v); |
1528 | fGrid[index].remove(v); |
1529 | --fNumVerts; |
1530 | } |
1531 | |
1532 | bool checkTriangle(const SkPoint& p0, const SkPoint& p1, const SkPoint& p2, |
1533 | uint16_t ignoreIndex0, uint16_t ignoreIndex1) const { |
1534 | if (!fNumVerts) { |
1535 | return false; |
1536 | } |
1537 | |
1538 | SkRect triBounds; |
1539 | compute_triangle_bounds(p0, p1, p2, &triBounds); |
1540 | int h0 = (triBounds.fLeft - fBounds.fLeft)*fGridConversion.fX; |
1541 | int h1 = (triBounds.fRight - fBounds.fLeft)*fGridConversion.fX; |
1542 | int v0 = (triBounds.fTop - fBounds.fTop)*fGridConversion.fY; |
1543 | int v1 = (triBounds.fBottom - fBounds.fTop)*fGridConversion.fY; |
1544 | |
1545 | for (int v = v0; v <= v1; ++v) { |
1546 | for (int h = h0; h <= h1; ++h) { |
1547 | int i = v * fHCount + h; |
1548 | for (SkTInternalLList<TriangulationVertex>::Iter reflexIter = fGrid[i].begin(); |
1549 | reflexIter != fGrid[i].end(); ++reflexIter) { |
1550 | TriangulationVertex* reflexVertex = *reflexIter; |
1551 | if (reflexVertex->fIndex != ignoreIndex0 && |
1552 | reflexVertex->fIndex != ignoreIndex1 && |
1553 | point_in_triangle(p0, p1, p2, reflexVertex->fPosition)) { |
1554 | return true; |
1555 | } |
1556 | } |
1557 | |
1558 | } |
1559 | } |
1560 | |
1561 | return false; |
1562 | } |
1563 | |
1564 | private: |
1565 | int hash(TriangulationVertex* vert) const { |
1566 | int h = (vert->fPosition.fX - fBounds.fLeft)*fGridConversion.fX; |
1567 | int v = (vert->fPosition.fY - fBounds.fTop)*fGridConversion.fY; |
1568 | SkASSERT(v*fHCount + h >= 0); |
1569 | return v*fHCount + h; |
1570 | } |
1571 | |
1572 | SkRect fBounds; |
1573 | int fHCount; |
1574 | int fVCount; |
1575 | int fNumVerts; |
1576 | // converts distance from the origin to a grid location (when cast to int) |
1577 | SkVector fGridConversion; |
1578 | SkTDArray<SkTInternalLList<TriangulationVertex>> fGrid; |
1579 | }; |
1580 | |
1581 | // Check to see if a reflex vertex has become a convex vertex after clipping an ear |
1582 | static void reclassify_vertex(TriangulationVertex* p, const SkPoint* polygonVerts, |
1583 | int winding, ReflexHash* reflexHash, |
1584 | SkTInternalLList<TriangulationVertex>* convexList) { |
1585 | if (TriangulationVertex::VertexType::kReflex == p->fVertexType) { |
1586 | SkVector v0 = p->fPosition - polygonVerts[p->fPrevIndex]; |
1587 | SkVector v1 = polygonVerts[p->fNextIndex] - p->fPosition; |
1588 | if (winding*v0.cross(v1) > SK_ScalarNearlyZero*SK_ScalarNearlyZero) { |
1589 | p->fVertexType = TriangulationVertex::VertexType::kConvex; |
1590 | reflexHash->remove(p); |
1591 | p->fPrev = p->fNext = nullptr; |
1592 | convexList->addToTail(p); |
1593 | } |
1594 | } |
1595 | } |
1596 | |
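// A minimal usage sketch (illustrative only, not taken from the Skia headers) showing a
// call with an identity index map; the variable names here are hypothetical:
//
//     SkPoint quad[4] = {{0, 0}, {1, 0}, {1, 1}, {0, 1}};
//     uint16_t indexMap[4] = {0, 1, 2, 3};
//     SkTDArray<uint16_t> indices;
//     if (SkTriangulateSimplePolygon(quad, indexMap, 4, &indices)) {
//         // indices now holds 6 entries describing the two triangles of the quad
//     }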
1597 | bool SkTriangulateSimplePolygon(const SkPoint* polygonVerts, uint16_t* indexMap, int polygonSize, |
1598 | SkTDArray<uint16_t>* triangleIndices) { |
1599 | if (polygonSize < 3) { |
1600 | return false; |
1601 | } |
1602 | // need to be able to represent all the vertices in the 16-bit indices |
1603 | if (polygonSize >= std::numeric_limits<uint16_t>::max()) { |
1604 | return false; |
1605 | } |
1606 | |
1607 | // get bounds |
1608 | SkRect bounds; |
1609 | if (!bounds.setBoundsCheck(polygonVerts, polygonSize)) { |
1610 | return false; |
1611 | } |
1612 | // get winding direction |
1613 | // TODO: we do this for all the polygon routines -- might be better to have the client |
1614 | // compute it and pass it in |
1615 | int winding = SkGetPolygonWinding(polygonVerts, polygonSize); |
1616 | if (0 == winding) { |
1617 | return false; |
1618 | } |
1619 | |
1620 | // Set up vertices |
1621 | SkAutoSTMalloc<64, TriangulationVertex> triangulationVertices(polygonSize); |
1622 | int prevIndex = polygonSize - 1; |
1623 | SkVector v0 = polygonVerts[0] - polygonVerts[prevIndex]; |
1624 | for (int currIndex = 0; currIndex < polygonSize; ++currIndex) { |
1625 | int nextIndex = (currIndex + 1) % polygonSize; |
1626 | |
1627 | triangulationVertices[currIndex] = TriangulationVertex{}; |
1628 | triangulationVertices[currIndex].fPosition = polygonVerts[currIndex]; |
1629 | triangulationVertices[currIndex].fIndex = currIndex; |
1630 | triangulationVertices[currIndex].fPrevIndex = prevIndex; |
1631 | triangulationVertices[currIndex].fNextIndex = nextIndex; |
1632 | SkVector v1 = polygonVerts[nextIndex] - polygonVerts[currIndex]; |
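// Positive when the turn at this vertex matches the overall winding, i.e. the interior
// angle is less than 180 degrees and the vertex is convex.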
1633 | if (winding*v0.cross(v1) > SK_ScalarNearlyZero*SK_ScalarNearlyZero) { |
1634 | triangulationVertices[currIndex].fVertexType = TriangulationVertex::VertexType::kConvex; |
1635 | } else { |
1636 | triangulationVertices[currIndex].fVertexType = TriangulationVertex::VertexType::kReflex; |
1637 | } |
1638 | |
1639 | prevIndex = currIndex; |
1640 | v0 = v1; |
1641 | } |
1642 | |
1643 | // Classify initial vertices into a list of convex vertices and a hash of reflex vertices |
1644 | // TODO: possibly sort the convexList in some way to get better triangles |
1645 | SkTInternalLList<TriangulationVertex> convexList; |
1646 | ReflexHash reflexHash; |
1647 | if (!reflexHash.init(bounds, polygonSize)) { |
1648 | return false; |
1649 | } |
1650 | prevIndex = polygonSize - 1; |
1651 | for (int currIndex = 0; currIndex < polygonSize; prevIndex = currIndex, ++currIndex) { |
1652 | TriangulationVertex::VertexType currType = triangulationVertices[currIndex].fVertexType; |
1653 | if (TriangulationVertex::VertexType::kConvex == currType) { |
1654 | int nextIndex = (currIndex + 1) % polygonSize; |
1655 | TriangulationVertex::VertexType prevType = triangulationVertices[prevIndex].fVertexType; |
1656 | TriangulationVertex::VertexType nextType = triangulationVertices[nextIndex].fVertexType; |
1657 | // We prioritize clipping vertices with neighboring reflex vertices. |
1658 | // The intent here is that it will cull reflex vertices more quickly. |
1659 | if (TriangulationVertex::VertexType::kReflex == prevType || |
1660 | TriangulationVertex::VertexType::kReflex == nextType) { |
1661 | convexList.addToHead(&triangulationVertices[currIndex]); |
1662 | } else { |
1663 | convexList.addToTail(&triangulationVertices[currIndex]); |
1664 | } |
1665 | } else { |
1666 | // We treat near collinear vertices as reflex |
1667 | reflexHash.add(&triangulationVertices[currIndex]); |
1668 | } |
1669 | } |
1670 | |
1671 | // The general concept: We are trying to find three neighboring vertices where |
1672 | // no other vertex lies inside the triangle (an "ear"). If we find one, we clip |
1673 | // that ear off, and then repeat on the new polygon. Once we get down to three vertices |
1674 | // we have triangulated the entire polygon. |
1675 | // In the worst case this is an n^2 algorithm. We can cut down the search space somewhat by |
1676 | // noting that only convex vertices can be potential ears, and we only need to check whether |
1677 | // any reflex vertices lie inside the ear. |
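// An n-vertex simple polygon always triangulates into exactly n - 2 triangles, so we
// reserve 3*(n - 2) additional indices up front.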
1678 | triangleIndices->setReserve(triangleIndices->count() + 3 * (polygonSize - 2)); |
1679 | int vertexCount = polygonSize; |
1680 | while (vertexCount > 3) { |
1681 | bool success = false; |
1682 | TriangulationVertex* earVertex = nullptr; |
1683 | TriangulationVertex* p0 = nullptr; |
1684 | TriangulationVertex* p2 = nullptr; |
1685 | // find a convex vertex to clip |
1686 | for (SkTInternalLList<TriangulationVertex>::Iter convexIter = convexList.begin(); |
1687 | convexIter != convexList.end(); ++convexIter) { |
1688 | earVertex = *convexIter; |
1689 | SkASSERT(TriangulationVertex::VertexType::kReflex != earVertex->fVertexType); |
1690 | |
1691 | p0 = &triangulationVertices[earVertex->fPrevIndex]; |
1692 | p2 = &triangulationVertices[earVertex->fNextIndex]; |
1693 | |
1694 | // see if any reflex vertices are inside the ear |
1695 | bool failed = reflexHash.checkTriangle(p0->fPosition, earVertex->fPosition, |
1696 | p2->fPosition, p0->fIndex, p2->fIndex); |
1697 | if (failed) { |
1698 | continue; |
1699 | } |
1700 | |
1701 | // found one we can clip |
1702 | success = true; |
1703 | break; |
1704 | } |
1705 | // If we can't find any ears to clip, this probably isn't a simple polygon |
1706 | if (!success) { |
1707 | return false; |
1708 | } |
1709 | |
1710 | // add indices |
1711 | auto indices = triangleIndices->append(3); |
1712 | indices[0] = indexMap[p0->fIndex]; |
1713 | indices[1] = indexMap[earVertex->fIndex]; |
1714 | indices[2] = indexMap[p2->fIndex]; |
1715 | |
1716 | // clip the ear |
1717 | convexList.remove(earVertex); |
1718 | --vertexCount; |
1719 | |
1720 | // reclassify reflex verts |
1721 | p0->fNextIndex = earVertex->fNextIndex; |
1722 | reclassify_vertex(p0, polygonVerts, winding, &reflexHash, &convexList); |
1723 | |
1724 | p2->fPrevIndex = earVertex->fPrevIndex; |
1725 | reclassify_vertex(p2, polygonVerts, winding, &reflexHash, &convexList); |
1726 | } |
1727 | |
1728 | // output indices |
1729 | for (SkTInternalLList<TriangulationVertex>::Iter vertexIter = convexList.begin(); |
1730 | vertexIter != convexList.end(); ++vertexIter) { |
1731 | TriangulationVertex* vertex = *vertexIter; |
1732 | *triangleIndices->push() = indexMap[vertex->fIndex]; |
1733 | } |
1734 | |
1735 | return true; |
1736 | } |
1737 | |
1738 | /////////// |
1739 | |
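// Signed 2D cross product of a and b (the z-component of the 3D cross product); the
// float result is widened to double on return.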
1740 | static double crs(SkVector a, SkVector b) { |
1741 | return a.fX * b.fY - a.fY * b.fX; |
1742 | } |
1743 | |
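// Returns -1, 0, or +1 according to the sign of v.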
1744 | static int sign(SkScalar v) { |
1745 | return v < 0 ? -1 : (v > 0); |
1746 | } |
1747 | |
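// Tracks the running sign of a sequence of values and how many times that sign has
// flipped; update() ignores zero values.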
1748 | struct SignTracker { |
1749 | int fSign; |
1750 | int fSignChanges; |
1751 | |
1752 | void reset() { |
1753 | fSign = 0; |
1754 | fSignChanges = 0; |
1755 | } |
1756 | |
1757 | void init(int s) { |
1758 | SkASSERT(fSignChanges == 0); |
1759 | SkASSERT(s == 1 || s == -1 || s == 0); |
1760 | fSign = s; |
1761 | fSignChanges = 1; |
1762 | } |
1763 | |
1764 | void update(int s) { |
1765 | if (s) { |
1766 | if (fSign != s) { |
1767 | fSignChanges += 1; |
1768 | fSign = s; |
1769 | } |
1770 | } |
1771 | } |
1772 | }; |
1773 | |
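// Accumulates the polygon's edge vectors and flags concavity. The idea: walking the
// boundary of a convex polygon exactly once, the cross product of consecutive edges
// never flips sign (so fCSign should record at most one sign), and the x-component of
// the edge direction flips sign at most twice (so fDSign should record at most three
// signs, counting the initial one). Exceeding either bound means the polygon is concave
// or self-intersecting.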
1774 | struct ConvexTracker { |
1775 | SkVector fFirst, fPrev; |
1776 | SignTracker fDSign, fCSign; |
1777 | int fVecCounter; |
1778 | bool fIsConcave; |
1779 | |
1780 | ConvexTracker() { this->reset(); } |
1781 | |
1782 | void reset() { |
1783 | fPrev = {0, 0}; |
1784 | fDSign.reset(); |
1785 | fCSign.reset(); |
1786 | fVecCounter = 0; |
1787 | fIsConcave = false; |
1788 | } |
1789 | |
1790 | void addVec(SkPoint p1, SkPoint p0) { |
1791 | this->addVec(p1 - p0); |
1792 | } |
1793 | void addVec(SkVector v) { |
1794 | if (v.fX == 0 && v.fY == 0) { |
1795 | return; |
1796 | } |
1797 | |
1798 | fVecCounter += 1; |
1799 | if (fVecCounter == 1) { |
1800 | fFirst = fPrev = v; |
1801 | fDSign.update(sign(v.fX)); |
1802 | return; |
1803 | } |
1804 | |
1805 | SkScalar d = v.fX; |
1806 | SkScalar c = crs(fPrev, v); |
1807 | int sign_c; |
1808 | if (c) { |
1809 | sign_c = sign(c); |
1810 | } else { |
1811 | if (d >= 0) { |
1812 | sign_c = fCSign.fSign; |
1813 | } else { |
1814 | sign_c = -fCSign.fSign; |
1815 | } |
1816 | } |
1817 | |
1818 | fDSign.update(sign(d)); |
1819 | fCSign.update(sign_c); |
1820 | fPrev = v; |
1821 | |
1822 | if (fDSign.fSignChanges > 3 || fCSign.fSignChanges > 1) { |
1823 | fIsConcave = true; |
1824 | } |
1825 | } |
1826 | |
1827 | void finalCross() { |
1828 | this->addVec(fFirst); |
1829 | } |
1830 | }; |
1831 | |
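// Illustrative call (values are hypothetical); an axis-aligned rectangle is convex:
//
//     SkPoint pts[] = {{0, 0}, {4, 0}, {4, 4}, {0, 4}};
//     bool convex = SkIsPolyConvex_experimental(pts, 4);   // expected to return true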
1832 | bool SkIsPolyConvex_experimental(const SkPoint pts[], int count) { |
1833 | if (count <= 3) { |
1834 | return true; |
1835 | } |
1836 | |
1837 | ConvexTracker tracker; |
1838 | |
1839 | for (int i = 0; i < count - 1; ++i) { |
1840 | tracker.addVec(pts[i + 1], pts[i]); |
1841 | if (tracker.fIsConcave) { |
1842 | return false; |
1843 | } |
1844 | } |
1845 | tracker.addVec(pts[0], pts[count - 1]); |
1846 | tracker.finalCross(); |
1847 | return !tracker.fIsConcave; |
1848 | } |
1849 | |
1850 | |