/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "OpReorderer.h"

#include "LayerUpdateQueue.h"
#include "RenderNode.h"
#include "renderstate/OffscreenBufferPool.h"
#include "utils/FatVector.h"
#include "utils/PaintUtils.h"
#include "utils/TraceUtils.h"

#include <SkCanvas.h>
#include <SkPathOps.h>
#include <utils/TypeHelpers.h>

namespace android {
namespace uirenderer {

class BatchBase {

public:
    BatchBase(batchid_t batchId, BakedOpState* op, bool merging)
            : mBatchId(batchId)
            , mMerging(merging) {
        mBounds = op->computedState.clippedBounds;
        mOps.push_back(op);
    }

    bool intersects(const Rect& rect) const {
        if (!rect.intersects(mBounds)) return false;

        for (const BakedOpState* op : mOps) {
            if (rect.intersects(op->computedState.clippedBounds)) {
                return true;
            }
        }
        return false;
    }

    batchid_t getBatchId() const { return mBatchId; }
    bool isMerging() const { return mMerging; }

    const std::vector<BakedOpState*>& getOps() const { return mOps; }

    void dump() const {
        ALOGD("    Batch %p, id %d, merging %d, count %zu, bounds " RECT_STRING,
                this, mBatchId, mMerging, mOps.size(), RECT_ARGS(mBounds));
    }
protected:
    batchid_t mBatchId;
    Rect mBounds;
    std::vector<BakedOpState*> mOps;
    bool mMerging;
};

class OpBatch : public BatchBase {
public:
    static void* operator new(size_t size, LinearAllocator& allocator) {
        return allocator.alloc(size);
    }

    OpBatch(batchid_t batchId, BakedOpState* op)
            : BatchBase(batchId, op, false) {
    }

    void batchOp(BakedOpState* op) {
        mBounds.unionWith(op->computedState.clippedBounds);
        mOps.push_back(op);
    }
};

class MergingOpBatch : public BatchBase {
public:
    static void* operator new(size_t size, LinearAllocator& allocator) {
        return allocator.alloc(size);
    }

    MergingOpBatch(batchid_t batchId, BakedOpState* op)
            : BatchBase(batchId, op, true)
            , mClipSideFlags(op->computedState.clipSideFlags)
            , mClipRect(op->computedState.clipRect) {
    }

    /*
     * Helper for determining if a new op can merge with a MergingOpBatch based on their bounds
     * and clip side flags. A positive bounds delta on a side means the new op's bounds extend
     * beyond the batch's bounds on that side.
     */
    static inline bool checkSide(const int currentFlags, const int newFlags, const int side,
            float boundsDelta) {
        bool currentClipExists = currentFlags & side;
        bool newClipExists = newFlags & side;

        // if current is clipped, we must be able to fit new bounds in current
        if (boundsDelta > 0 && currentClipExists) return false;

        // if new is clipped, we must be able to fit current bounds in new
        if (boundsDelta < 0 && newClipExists) return false;

        return true;
    }
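    // Worked example (illustrative values): if this batch is clipped on the left, its bounds.left
    // equals its clip.left, say 10. A new op whose clipped bounds extend to left == 4 gives
    // boundsDelta == 10 - 4 == 6 > 0, so checkSide() rejects the merge - drawing that op under the
    // batch's left clip would incorrectly cut off its content.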

    static bool paintIsDefault(const SkPaint& paint) {
        return paint.getAlpha() == 255
                && paint.getColorFilter() == nullptr
                && paint.getShader() == nullptr;
    }

    static bool paintsAreEquivalent(const SkPaint& a, const SkPaint& b) {
        return a.getAlpha() == b.getAlpha()
                && a.getColorFilter() == b.getColorFilter()
                && a.getShader() == b.getShader();
    }

    /*
     * Checks if a (mergeable) op can be merged into this batch
     *
     * If true, the op's multiDraw must be guaranteed to handle both ops simultaneously, so it is
     * important to consider all paint attributes used in the draw calls in deciding both a) if an
     * op tries to merge at all, and b) if the op can merge with another set of ops
     *
     * False positives can lead to information from the paints of subsequent merged operations being
     * dropped, so we make simplifying qualifications on the ops that can merge, per op type.
     */
    bool canMergeWith(BakedOpState* op) const {
        bool isTextBatch = getBatchId() == OpBatchType::Text
                || getBatchId() == OpBatchType::ColorText;

        // Overlapping other operations is only allowed for text without shadow. For other ops,
        // multiDraw isn't guaranteed to overdraw correctly
        if (!isTextBatch || PaintUtils::hasTextShadow(op->op->paint)) {
            if (intersects(op->computedState.clippedBounds)) return false;
        }

        const BakedOpState* lhs = op;
        const BakedOpState* rhs = mOps[0];

        if (!MathUtils::areEqual(lhs->alpha, rhs->alpha)) return false;

        // Identical round rect clip state means both ops will clip in the same way, or not at all.
        // As the state objects are const, we can compare their pointers to determine mergeability
        if (lhs->roundRectClipState != rhs->roundRectClipState) return false;
        if (lhs->projectionPathMask != rhs->projectionPathMask) return false;

        /* Clipping compatibility check
         *
         * Exploits the fact that if an op or batch is clipped on a side, its bounds will equal its
         * clip for that side.
         */
        const int currentFlags = mClipSideFlags;
        const int newFlags = op->computedState.clipSideFlags;
        if (currentFlags != OpClipSideFlags::None || newFlags != OpClipSideFlags::None) {
            const Rect& opBounds = op->computedState.clippedBounds;
            float boundsDelta = mBounds.left - opBounds.left;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Left, boundsDelta)) return false;
            boundsDelta = mBounds.top - opBounds.top;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Top, boundsDelta)) return false;

            // right and bottom delta calculation reversed to account for direction
            boundsDelta = opBounds.right - mBounds.right;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Right, boundsDelta)) return false;
            boundsDelta = opBounds.bottom - mBounds.bottom;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Bottom, boundsDelta)) return false;
        }

        const SkPaint* newPaint = op->op->paint;
        const SkPaint* oldPaint = mOps[0]->op->paint;

        if (newPaint == oldPaint) {
            // if paints are equal, then modifiers + paint attribs don't need to be compared
            return true;
        } else if (newPaint && !oldPaint) {
            return paintIsDefault(*newPaint);
        } else if (!newPaint && oldPaint) {
            return paintIsDefault(*oldPaint);
        }
        return paintsAreEquivalent(*newPaint, *oldPaint);
    }

    void mergeOp(BakedOpState* op) {
        mBounds.unionWith(op->computedState.clippedBounds);
        mOps.push_back(op);

        const int newClipSideFlags = op->computedState.clipSideFlags;
        mClipSideFlags |= newClipSideFlags;

        const Rect& opClip = op->computedState.clipRect;
        if (newClipSideFlags & OpClipSideFlags::Left) mClipRect.left = opClip.left;
        if (newClipSideFlags & OpClipSideFlags::Top) mClipRect.top = opClip.top;
        if (newClipSideFlags & OpClipSideFlags::Right) mClipRect.right = opClip.right;
        if (newClipSideFlags & OpClipSideFlags::Bottom) mClipRect.bottom = opClip.bottom;
    }

    int getClipSideFlags() const { return mClipSideFlags; }
    const Rect& getClipRect() const { return mClipRect; }

private:
    int mClipSideFlags;
    Rect mClipRect;
};

OpReorderer::LayerReorderer::LayerReorderer(uint32_t width, uint32_t height,
        const Rect& repaintRect, const BeginLayerOp* beginLayerOp, RenderNode* renderNode)
        : width(width)
        , height(height)
        , repaintRect(repaintRect)
        , offscreenBuffer(renderNode ? renderNode->getLayer() : nullptr)
        , beginLayerOp(beginLayerOp)
        , renderNode(renderNode) {}

// iterate back toward target to see if anything drawn since should overlap the new op
// if no target, merging ops still iterate to find similar batch to insert after
void OpReorderer::LayerReorderer::locateInsertIndex(int batchId, const Rect& clippedBounds,
        BatchBase** targetBatch, size_t* insertBatchIndex) const {
    for (int i = mBatches.size() - 1; i >= 0; i--) {
        BatchBase* overBatch = mBatches[i];

        if (overBatch == *targetBatch) break;

        // TODO: also consider shader shared between batch types
        if (batchId == overBatch->getBatchId()) {
            *insertBatchIndex = i + 1;
            if (!*targetBatch) break; // found insert position, quit
        }

        if (overBatch->intersects(clippedBounds)) {
            // NOTE: it may be possible to optimize for special cases where two operations
            // of the same batch/paint could swap order, such as with a non-mergeable
            // (clipped) and a mergeable text operation
            *targetBatch = nullptr;
            break;
        }
    }
}
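// For example, if the deferred batches are [Text, Bitmap] and a new Bitmap op arrives whose bounds
// don't intersect anything deferred after the existing Bitmap batch, it can be appended to that
// batch (fewer GL state changes at replay time). If it does overlap a later batch, targetBatch is
// nulled out above and a fresh batch is inserted at insertBatchIndex instead, so visible draw
// order is preserved.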

void OpReorderer::LayerReorderer::deferUnmergeableOp(LinearAllocator& allocator,
        BakedOpState* op, batchid_t batchId) {
    OpBatch* targetBatch = mBatchLookup[batchId];

    size_t insertBatchIndex = mBatches.size();
    if (targetBatch) {
        locateInsertIndex(batchId, op->computedState.clippedBounds,
                (BatchBase**)(&targetBatch), &insertBatchIndex);
    }

    if (targetBatch) {
        targetBatch->batchOp(op);
    } else {
        // new non-merging batch
        targetBatch = new (allocator) OpBatch(batchId, op);
        mBatchLookup[batchId] = targetBatch;
        mBatches.insert(mBatches.begin() + insertBatchIndex, targetBatch);
    }
}

// insertion point of a new batch, will hopefully be immediately after similar batch
// (generally, should be similar shader)
void OpReorderer::LayerReorderer::deferMergeableOp(LinearAllocator& allocator,
        BakedOpState* op, batchid_t batchId, mergeid_t mergeId) {
    MergingOpBatch* targetBatch = nullptr;

    // Try to merge with any existing batch with same mergeId
    auto getResult = mMergingBatchLookup[batchId].find(mergeId);
    if (getResult != mMergingBatchLookup[batchId].end()) {
        targetBatch = getResult->second;
        if (!targetBatch->canMergeWith(op)) {
            targetBatch = nullptr;
        }
    }

    size_t insertBatchIndex = mBatches.size();
    locateInsertIndex(batchId, op->computedState.clippedBounds,
            (BatchBase**)(&targetBatch), &insertBatchIndex);

    if (targetBatch) {
        targetBatch->mergeOp(op);
    } else {
        // new merging batch
        targetBatch = new (allocator) MergingOpBatch(batchId, op);
        mMergingBatchLookup[batchId].insert(std::make_pair(mergeId, targetBatch));

        mBatches.insert(mBatches.begin() + insertBatchIndex, targetBatch);
    }
}

void OpReorderer::LayerReorderer::replayBakedOpsImpl(void* arg,
        BakedOpReceiver* unmergedReceivers, MergedOpReceiver* mergedReceivers) const {
    ATRACE_NAME("flush drawing commands");
    for (const BatchBase* batch : mBatches) {
        size_t size = batch->getOps().size();
        if (size > 1 && batch->isMerging()) {
            int opId = batch->getOps()[0]->op->opId;
            const MergingOpBatch* mergingBatch = static_cast<const MergingOpBatch*>(batch);
            MergedBakedOpList data = {
                    batch->getOps().data(),
                    size,
                    mergingBatch->getClipSideFlags(),
                    mergingBatch->getClipRect()
            };
            mergedReceivers[opId](arg, data);
        } else {
            for (const BakedOpState* op : batch->getOps()) {
                unmergedReceivers[op->op->opId](arg, *op);
            }
        }
    }
}
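// Replay is table-driven: each RecordedOp carries an opId that indexes into the receiver arrays
// supplied by the renderer. A merging batch with two or more ops is handed to the MergedOpReceiver
// for its opId as a single MergedBakedOpList; everything else is replayed op by op through the
// matching BakedOpReceiver.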

void OpReorderer::LayerReorderer::dump() const {
    ALOGD("LayerReorderer %p, %ux%u buffer %p, blo %p, rn %p",
            this, width, height, offscreenBuffer, beginLayerOp, renderNode);
    for (const BatchBase* batch : mBatches) {
        batch->dump();
    }
}

OpReorderer::OpReorderer(const LayerUpdateQueue& layers, const SkRect& clip,
        uint32_t viewportWidth, uint32_t viewportHeight,
        const std::vector< sp<RenderNode> >& nodes, const Vector3& lightCenter)
        : mCanvasState(*this) {
    ATRACE_NAME("prepare drawing commands");

    mLayerReorderers.reserve(layers.entries().size());
    mLayerStack.reserve(layers.entries().size());

    // Prepare to defer Fbo0
    mLayerReorderers.emplace_back(viewportWidth, viewportHeight, Rect(clip));
    mLayerStack.push_back(0);
    mCanvasState.initializeSaveStack(viewportWidth, viewportHeight,
            clip.fLeft, clip.fTop, clip.fRight, clip.fBottom,
            lightCenter);

    // Render all layers to be updated, in order. Defer in reverse order, so that they'll be
    // updated in the order they're passed in (mLayerReorderers are issued to Renderer in reverse)
    for (int i = layers.entries().size() - 1; i >= 0; i--) {
        RenderNode* layerNode = layers.entries()[i].renderNode;
        const Rect& layerDamage = layers.entries()[i].damage;
        layerNode->computeOrdering();

        // map current light center into RenderNode's coordinate space
        Vector3 lightCenter = mCanvasState.currentSnapshot()->getRelativeLightCenter();
        layerNode->getLayer()->inverseTransformInWindow.mapPoint3d(lightCenter);

        saveForLayer(layerNode->getWidth(), layerNode->getHeight(), 0, 0,
                layerDamage, lightCenter, nullptr, layerNode);

        if (layerNode->getDisplayList()) {
            deferNodeOps(*layerNode);
        }
        restoreForLayer();
    }

    // Defer Fbo0
    for (const sp<RenderNode>& node : nodes) {
        if (node->nothingToDraw()) continue;
        node->computeOrdering();

        int count = mCanvasState.save(SkCanvas::kClip_SaveFlag | SkCanvas::kMatrix_SaveFlag);
        deferNodePropsAndOps(*node);
        mCanvasState.restoreToCount(count);
    }
}

void OpReorderer::onViewportInitialized() {}

void OpReorderer::onSnapshotRestored(const Snapshot& removed, const Snapshot& restored) {}

void OpReorderer::deferNodePropsAndOps(RenderNode& node) {
    const RenderProperties& properties = node.properties();
    const Outline& outline = properties.getOutline();
    if (properties.getAlpha() <= 0
            || (outline.getShouldClip() && outline.isEmpty())
            || properties.getScaleX() == 0
            || properties.getScaleY() == 0) {
        return; // rejected
    }

    if (properties.getLeft() != 0 || properties.getTop() != 0) {
        mCanvasState.translate(properties.getLeft(), properties.getTop());
    }
    if (properties.getStaticMatrix()) {
        mCanvasState.concatMatrix(*properties.getStaticMatrix());
    } else if (properties.getAnimationMatrix()) {
        mCanvasState.concatMatrix(*properties.getAnimationMatrix());
    }
    if (properties.hasTransformMatrix()) {
        if (properties.isTransformTranslateOnly()) {
            mCanvasState.translate(properties.getTranslationX(), properties.getTranslationY());
        } else {
            mCanvasState.concatMatrix(*properties.getTransformMatrix());
        }
    }

    const int width = properties.getWidth();
    const int height = properties.getHeight();

    Rect saveLayerBounds; // will be set to non-empty if saveLayer needed
    const bool isLayer = properties.effectiveLayerType() != LayerType::None;
    int clipFlags = properties.getClippingFlags();
    if (properties.getAlpha() < 1) {
        if (isLayer) {
            clipFlags &= ~CLIP_TO_BOUNDS; // bounds clipping done by layer
        }
        if (CC_LIKELY(isLayer || !properties.getHasOverlappingRendering())) {
            // simply scale rendering content's alpha
            mCanvasState.scaleAlpha(properties.getAlpha());
        } else {
            // schedule saveLayer by initializing saveLayerBounds
            saveLayerBounds.set(0, 0, width, height);
            if (clipFlags) {
                properties.getClippingRectForFlags(clipFlags, &saveLayerBounds);
                clipFlags = 0; // all clipping done by savelayer
            }
        }

        if (CC_UNLIKELY(ATRACE_ENABLED() && properties.promotedToLayer())) {
            // pretend alpha always causes savelayer to warn about
            // performance problem affecting old versions
            ATRACE_FORMAT("%s alpha caused saveLayer %dx%d", node.getName(), width, height);
        }
    }
    if (clipFlags) {
        Rect clipRect;
        properties.getClippingRectForFlags(clipFlags, &clipRect);
        mCanvasState.clipRect(clipRect.left, clipRect.top, clipRect.right, clipRect.bottom,
                SkRegion::kIntersect_Op);
    }

    if (properties.getRevealClip().willClip()) {
        Rect bounds;
        properties.getRevealClip().getBounds(&bounds);
        mCanvasState.setClippingRoundRect(mAllocator,
                bounds, properties.getRevealClip().getRadius());
    } else if (properties.getOutline().willClip()) {
        mCanvasState.setClippingOutline(mAllocator, &(properties.getOutline()));
    }

    if (!mCanvasState.quickRejectConservative(0, 0, width, height)) {
        // not rejected, so defer render as either Layer, or direct (possibly wrapped in saveLayer)
        if (node.getLayer()) {
            // HW layer
            LayerOp* drawLayerOp = new (mAllocator) LayerOp(node);
            BakedOpState* bakedOpState = tryBakeOpState(*drawLayerOp);
            if (bakedOpState) {
                // Node's layer already deferred, schedule it to render into parent layer
                currentLayer().deferUnmergeableOp(mAllocator, bakedOpState, OpBatchType::Bitmap);
            }
        } else if (CC_UNLIKELY(!saveLayerBounds.isEmpty())) {
            // draw DisplayList contents within a temporary layer, since the persisted layer could not be used
            // (temp layers are clipped to the viewport, since they don't persist offscreen content)
            SkPaint saveLayerPaint;
            saveLayerPaint.setAlpha(properties.getAlpha());
            deferBeginLayerOp(*new (mAllocator) BeginLayerOp(
                    saveLayerBounds,
                    Matrix4::identity(),
                    saveLayerBounds,
                    &saveLayerPaint));
            deferNodeOps(node);
            deferEndLayerOp(*new (mAllocator) EndLayerOp());
        } else {
            deferNodeOps(node);
        }
    }
}

typedef key_value_pair_t<float, const RenderNodeOp*> ZRenderNodeOpPair;

template <typename V>
static void buildZSortedChildList(V* zTranslatedNodes,
        const DisplayList& displayList, const DisplayList::Chunk& chunk) {
    if (chunk.beginChildIndex == chunk.endChildIndex) return;

    for (size_t i = chunk.beginChildIndex; i < chunk.endChildIndex; i++) {
        RenderNodeOp* childOp = displayList.getChildren()[i];
        RenderNode* child = childOp->renderNode;
        float childZ = child->properties().getZ();

        if (!MathUtils::isZero(childZ) && chunk.reorderChildren) {
            zTranslatedNodes->push_back(ZRenderNodeOpPair(childZ, childOp));
            childOp->skipInOrderDraw = true;
        } else if (!child->properties().getProjectBackwards()) {
            // regular, in order drawing DisplayList
            childOp->skipInOrderDraw = false;
        }
    }

    // Z sort any 3d children (stable-ness makes z compare fall back to standard drawing order)
    std::stable_sort(zTranslatedNodes->begin(), zTranslatedNodes->end());
}
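// For example, children recorded as A(z=4), B(z=0), C(z=4), D(z=-2) in a reordering chunk yield
// zTranslatedNodes == [D, A, C]: B stays in the normal in-order draw stream, D is handled by the
// Negative pass, and the stable sort keeps A ahead of C even though their Z values are equal.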

template <typename V>
static size_t findNonNegativeIndex(const V& zTranslatedNodes) {
    for (size_t i = 0; i < zTranslatedNodes.size(); i++) {
        if (zTranslatedNodes[i].key >= 0.0f) return i;
    }
    return zTranslatedNodes.size();
}

template <typename V>
void OpReorderer::defer3dChildren(ChildrenSelectMode mode, const V& zTranslatedNodes) {
    const int size = zTranslatedNodes.size();
    if (size == 0
            || (mode == ChildrenSelectMode::Negative && zTranslatedNodes[0].key > 0.0f)
            || (mode == ChildrenSelectMode::Positive && zTranslatedNodes[size - 1].key < 0.0f)) {
        // no 3d children to draw
        return;
    }

    /**
     * Draw shadows and (potential) casters mostly in order, but allow the shadows of casters
     * with very similar Z heights to draw together.
     *
     * This way, if Views A & B have the same Z height and are both casting shadows, the shadows are
     * underneath both, and neither's shadow is drawn on top of the other.
     */
    const size_t nonNegativeIndex = findNonNegativeIndex(zTranslatedNodes);
    size_t drawIndex, shadowIndex, endIndex;
    if (mode == ChildrenSelectMode::Negative) {
        drawIndex = 0;
        endIndex = nonNegativeIndex;
        shadowIndex = endIndex; // draw no shadows
    } else {
        drawIndex = nonNegativeIndex;
        endIndex = size;
        shadowIndex = drawIndex; // potentially draw shadow for each pos Z child
    }

    float lastCasterZ = 0.0f;
    while (shadowIndex < endIndex || drawIndex < endIndex) {
        if (shadowIndex < endIndex) {
            const RenderNodeOp* casterNodeOp = zTranslatedNodes[shadowIndex].value;
            const float casterZ = zTranslatedNodes[shadowIndex].key;
            // attempt to render the shadow if the caster about to be drawn is its caster,
            // OR if its caster's Z value is similar to the previous potential caster
            if (shadowIndex == drawIndex || casterZ - lastCasterZ < 0.1f) {
                deferShadow(*casterNodeOp);

                lastCasterZ = casterZ; // must do this even if current caster not casting a shadow
                shadowIndex++;
                continue;
            }
        }

        const RenderNodeOp* childOp = zTranslatedNodes[drawIndex].value;
        deferRenderNodeOpImpl(*childOp);
        drawIndex++;
    }
}

void OpReorderer::deferShadow(const RenderNodeOp& casterNodeOp) {
    auto& node = *casterNodeOp.renderNode;
    auto& properties = node.properties();

    if (properties.getAlpha() <= 0.0f
            || properties.getOutline().getAlpha() <= 0.0f
            || !properties.getOutline().getPath()
            || properties.getScaleX() == 0
            || properties.getScaleY() == 0) {
        // no shadow to draw
        return;
    }

    const SkPath* casterOutlinePath = properties.getOutline().getPath();
    const SkPath* revealClipPath = properties.getRevealClip().getPath();
    if (revealClipPath && revealClipPath->isEmpty()) return;

    float casterAlpha = properties.getAlpha() * properties.getOutline().getAlpha();

    // holds temporary SkPath to store the result of intersections
    SkPath* frameAllocatedPath = nullptr;
    const SkPath* casterPath = casterOutlinePath;

    // intersect the shadow-casting path with the reveal, if present
    if (revealClipPath) {
        frameAllocatedPath = createFrameAllocatedPath();

        Op(*casterPath, *revealClipPath, kIntersect_SkPathOp, frameAllocatedPath);
        casterPath = frameAllocatedPath;
    }

    // intersect the shadow-casting path with the clipBounds, if present
    if (properties.getClippingFlags() & CLIP_TO_CLIP_BOUNDS) {
        if (!frameAllocatedPath) {
            frameAllocatedPath = createFrameAllocatedPath();
        }
        Rect clipBounds;
        properties.getClippingRectForFlags(CLIP_TO_CLIP_BOUNDS, &clipBounds);
        SkPath clipBoundsPath;
        clipBoundsPath.addRect(clipBounds.left, clipBounds.top,
                clipBounds.right, clipBounds.bottom);

        Op(*casterPath, clipBoundsPath, kIntersect_SkPathOp, frameAllocatedPath);
        casterPath = frameAllocatedPath;
    }

    ShadowOp* shadowOp = new (mAllocator) ShadowOp(casterNodeOp, casterAlpha, casterPath,
            mCanvasState.getLocalClipBounds(),
            mCanvasState.currentSnapshot()->getRelativeLightCenter());
    BakedOpState* bakedOpState = BakedOpState::tryShadowOpConstruct(
            mAllocator, *mCanvasState.currentSnapshot(), shadowOp);
    if (CC_LIKELY(bakedOpState)) {
        currentLayer().deferUnmergeableOp(mAllocator, bakedOpState, OpBatchType::Shadow);
    }
}

void OpReorderer::deferProjectedChildren(const RenderNode& renderNode) {
    const SkPath* projectionReceiverOutline = renderNode.properties().getOutline().getPath();
    int count = mCanvasState.save(SkCanvas::kMatrix_SaveFlag | SkCanvas::kClip_SaveFlag);

    // can't be null, since DL=null node rejection happens before deferNodePropsAndOps
    const DisplayList& displayList = *(renderNode.getDisplayList());

    const RecordedOp* op = (displayList.getOps()[displayList.projectionReceiveIndex]);
    const RenderNodeOp* backgroundOp = static_cast<const RenderNodeOp*>(op);
    const RenderProperties& backgroundProps = backgroundOp->renderNode->properties();

    // Transform renderer to match background we're projecting onto
    // (by offsetting canvas by translationX/Y of background rendernode, since only those are set)
    mCanvasState.translate(backgroundProps.getTranslationX(), backgroundProps.getTranslationY());

    // If the projection receiver has an outline, we mask projected content to it
    // (which we know, a priori, is made up entirely of tessellated paths)
    mCanvasState.setProjectionPathMask(mAllocator, projectionReceiverOutline);

    // draw projected nodes
    for (size_t i = 0; i < renderNode.mProjectedNodes.size(); i++) {
        RenderNodeOp* childOp = renderNode.mProjectedNodes[i];

        int restoreTo = mCanvasState.save(SkCanvas::kMatrix_SaveFlag);
        mCanvasState.concatMatrix(childOp->transformFromCompositingAncestor);
        deferRenderNodeOpImpl(*childOp);
        mCanvasState.restoreToCount(restoreTo);
    }

    mCanvasState.restoreToCount(count);
}

/**
 * Used to define a list of lambdas referencing private OpReorderer::deferXX() methods.
 *
 * This allows opIds embedded in the RecordedOps to be used for dispatching to these lambdas.
 * E.g. a BitmapOp would be dispatched to OpReorderer::deferBitmapOp(const BitmapOp&)
 */
#define OP_RECEIVER(Type) \
        [](OpReorderer& reorderer, const RecordedOp& op) { reorderer.defer##Type(static_cast<const Type&>(op)); },
void OpReorderer::deferNodeOps(const RenderNode& renderNode) {
    typedef void (*OpDispatcher) (OpReorderer& reorderer, const RecordedOp& op);
    static OpDispatcher receivers[] = {
            MAP_OPS(OP_RECEIVER)
    };

    // can't be null, since DL=null node rejection happens before deferNodePropsAndOps
    const DisplayList& displayList = *(renderNode.getDisplayList());
    for (const DisplayList::Chunk& chunk : displayList.getChunks()) {
        FatVector<ZRenderNodeOpPair, 16> zTranslatedNodes;
        buildZSortedChildList(&zTranslatedNodes, displayList, chunk);

        defer3dChildren(ChildrenSelectMode::Negative, zTranslatedNodes);
        for (size_t opIndex = chunk.beginOpIndex; opIndex < chunk.endOpIndex; opIndex++) {
            const RecordedOp* op = displayList.getOps()[opIndex];
            receivers[op->opId](*this, *op);

            if (CC_UNLIKELY(!renderNode.mProjectedNodes.empty()
                    && displayList.projectionReceiveIndex >= 0
                    && static_cast<int>(opIndex) == displayList.projectionReceiveIndex)) {
                deferProjectedChildren(renderNode);
            }
        }
        defer3dChildren(ChildrenSelectMode::Positive, zTranslatedNodes);
    }
}

void OpReorderer::deferRenderNodeOpImpl(const RenderNodeOp& op) {
    if (op.renderNode->nothingToDraw()) return;
    int count = mCanvasState.save(SkCanvas::kClip_SaveFlag | SkCanvas::kMatrix_SaveFlag);

    // apply state from RecordedOp
    mCanvasState.concatMatrix(op.localMatrix);
    mCanvasState.clipRect(op.localClipRect.left, op.localClipRect.top,
            op.localClipRect.right, op.localClipRect.bottom, SkRegion::kIntersect_Op);

    // then apply state from node properties, and defer ops
    deferNodePropsAndOps(*op.renderNode);

    mCanvasState.restoreToCount(count);
}

void OpReorderer::deferRenderNodeOp(const RenderNodeOp& op) {
    if (!op.skipInOrderDraw) {
        deferRenderNodeOpImpl(op);
    }
}

/**
 * Defers an unmergeable, strokeable op, accounting correctly
 * for paint's style on the bounds being computed.
 */
void OpReorderer::deferStrokeableOp(const RecordedOp& op, batchid_t batchId,
        BakedOpState::StrokeBehavior strokeBehavior) {
    // Note: here we account for stroke when baking the op
    BakedOpState* bakedState = BakedOpState::tryStrokeableOpConstruct(
            mAllocator, *mCanvasState.currentSnapshot(), op, strokeBehavior);
    if (!bakedState) return; // quick rejected
    currentLayer().deferUnmergeableOp(mAllocator, bakedState, batchId);
}

/**
 * Returns batch id for tessellatable shapes, based on paint. Checks to see if path effect/AA will
 * be used, since they trigger significantly different rendering paths.
 *
 * Note: not used for lines/points, since they don't currently support path effects.
 */
static batchid_t tessBatchId(const RecordedOp& op) {
    const SkPaint& paint = *(op.paint);
    return paint.getPathEffect()
            ? OpBatchType::AlphaMaskTexture
            : (paint.isAntiAlias() ? OpBatchType::AlphaVertices : OpBatchType::Vertices);
}

void OpReorderer::deferArcOp(const ArcOp& op) {
    deferStrokeableOp(op, tessBatchId(op));
}

void OpReorderer::deferBitmapOp(const BitmapOp& op) {
    BakedOpState* bakedState = tryBakeOpState(op);
    if (!bakedState) return; // quick rejected

    // Don't merge non-simply transformed or neg scale ops, SET_TEXTURE doesn't handle rotation
    // Don't merge A8 bitmaps - the paint's color isn't compared by mergeId, or in
    // MergingOpBatch::canMergeWith()
    if (bakedState->computedState.transform.isSimple()
            && bakedState->computedState.transform.positiveScale()
            && PaintUtils::getXfermodeDirect(op.paint) == SkXfermode::kSrcOver_Mode
            && op.bitmap->colorType() != kAlpha_8_SkColorType) {
        mergeid_t mergeId = (mergeid_t) op.bitmap->getGenerationID();
        // TODO: AssetAtlas in mergeId
        currentLayer().deferMergeableOp(mAllocator, bakedState, OpBatchType::Bitmap, mergeId);
    } else {
        currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Bitmap);
    }
}

void OpReorderer::deferBitmapMeshOp(const BitmapMeshOp& op) {
    BakedOpState* bakedState = tryBakeOpState(op);
    if (!bakedState) return; // quick rejected
    currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Bitmap);
}

void OpReorderer::deferBitmapRectOp(const BitmapRectOp& op) {
    BakedOpState* bakedState = tryBakeOpState(op);
    if (!bakedState) return; // quick rejected
    currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Bitmap);
}

void OpReorderer::deferCirclePropsOp(const CirclePropsOp& op) {
    // allocate a temporary oval op (with mAllocator, so it persists until render), so the
    // renderer doesn't have to handle the CirclePropsOp type, and so state baking is simple.
    float x = *(op.x);
    float y = *(op.y);
    float radius = *(op.radius);
    Rect unmappedBounds(x - radius, y - radius, x + radius, y + radius);
    const OvalOp* resolvedOp = new (mAllocator) OvalOp(
            unmappedBounds,
            op.localMatrix,
            op.localClipRect,
            op.paint);
    deferOvalOp(*resolvedOp);
}

void OpReorderer::deferFunctorOp(const FunctorOp& op) {
    BakedOpState* bakedState = tryBakeOpState(op);
    if (!bakedState) return; // quick rejected
    currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::None);
}

void OpReorderer::deferLinesOp(const LinesOp& op) {
    batchid_t batch = op.paint->isAntiAlias() ? OpBatchType::AlphaVertices : OpBatchType::Vertices;
    deferStrokeableOp(op, batch, BakedOpState::StrokeBehavior::Forced);
}

void OpReorderer::deferOvalOp(const OvalOp& op) {
    deferStrokeableOp(op, tessBatchId(op));
}

void OpReorderer::deferPatchOp(const PatchOp& op) {
    BakedOpState* bakedState = tryBakeOpState(op);
    if (!bakedState) return; // quick rejected

    if (bakedState->computedState.transform.isPureTranslate()
            && PaintUtils::getXfermodeDirect(op.paint) == SkXfermode::kSrcOver_Mode) {
        mergeid_t mergeId = (mergeid_t) op.bitmap->getGenerationID();
        // TODO: AssetAtlas in mergeId

        // Only use the MergedPatch batchId when merged, so Bitmap+Patch don't try to merge together
        currentLayer().deferMergeableOp(mAllocator, bakedState, OpBatchType::MergedPatch, mergeId);
    } else {
        // Use Bitmap batchId since Bitmap+Patch use same shader
        currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Bitmap);
    }
}

void OpReorderer::deferPathOp(const PathOp& op) {
    deferStrokeableOp(op, OpBatchType::Bitmap);
}

void OpReorderer::deferPointsOp(const PointsOp& op) {
    batchid_t batch = op.paint->isAntiAlias() ? OpBatchType::AlphaVertices : OpBatchType::Vertices;
    deferStrokeableOp(op, batch, BakedOpState::StrokeBehavior::Forced);
}

void OpReorderer::deferRectOp(const RectOp& op) {
    deferStrokeableOp(op, tessBatchId(op));
}

void OpReorderer::deferRoundRectOp(const RoundRectOp& op) {
    deferStrokeableOp(op, tessBatchId(op));
}

void OpReorderer::deferRoundRectPropsOp(const RoundRectPropsOp& op) {
    // allocate a temporary round rect op (with mAllocator, so it persists until render), so the
    // renderer doesn't have to handle the RoundRectPropsOp type, and so state baking is simple.
    const RoundRectOp* resolvedOp = new (mAllocator) RoundRectOp(
            Rect(*(op.left), *(op.top), *(op.right), *(op.bottom)),
            op.localMatrix,
            op.localClipRect,
            op.paint, *op.rx, *op.ry);
    deferRoundRectOp(*resolvedOp);
}

void OpReorderer::deferSimpleRectsOp(const SimpleRectsOp& op) {
    BakedOpState* bakedState = tryBakeOpState(op);
    if (!bakedState) return; // quick rejected
    currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Vertices);
}

static batchid_t textBatchId(const SkPaint& paint) {
    // TODO: better handling of shader (since we won't care about color then)
    return paint.getColor() == SK_ColorBLACK ? OpBatchType::Text : OpBatchType::ColorText;
}

void OpReorderer::deferTextOp(const TextOp& op) {
    BakedOpState* bakedState = tryBakeOpState(op);
    if (!bakedState) return; // quick rejected

    batchid_t batchId = textBatchId(*(op.paint));
    if (bakedState->computedState.transform.isPureTranslate()
            && PaintUtils::getXfermodeDirect(op.paint) == SkXfermode::kSrcOver_Mode) {
        mergeid_t mergeId = reinterpret_cast<mergeid_t>(op.paint->getColor());
        currentLayer().deferMergeableOp(mAllocator, bakedState, batchId, mergeId);
    } else {
        currentLayer().deferUnmergeableOp(mAllocator, bakedState, batchId);
    }
}

void OpReorderer::deferTextOnPathOp(const TextOnPathOp& op) {
    BakedOpState* bakedState = tryBakeOpState(op);
    if (!bakedState) return; // quick rejected
    currentLayer().deferUnmergeableOp(mAllocator, bakedState, textBatchId(*(op.paint)));
}

void OpReorderer::saveForLayer(uint32_t layerWidth, uint32_t layerHeight,
        float contentTranslateX, float contentTranslateY,
        const Rect& repaintRect,
        const Vector3& lightCenter,
        const BeginLayerOp* beginLayerOp, RenderNode* renderNode) {
    mCanvasState.save(SkCanvas::kClip_SaveFlag | SkCanvas::kMatrix_SaveFlag);
    mCanvasState.writableSnapshot()->initializeViewport(layerWidth, layerHeight);
    mCanvasState.writableSnapshot()->roundRectClipState = nullptr;
    mCanvasState.writableSnapshot()->setRelativeLightCenter(lightCenter);
    mCanvasState.writableSnapshot()->transform->loadTranslate(
            contentTranslateX, contentTranslateY, 0);
    mCanvasState.writableSnapshot()->setClip(
            repaintRect.left, repaintRect.top, repaintRect.right, repaintRect.bottom);

    // create a new LayerReorderer for the layer's repaint, and push its index on the stack
    mLayerStack.push_back(mLayerReorderers.size());
    mLayerReorderers.emplace_back(layerWidth, layerHeight, repaintRect, beginLayerOp, renderNode);
}

void OpReorderer::restoreForLayer() {
    // restore canvas, and pop finished layer off of the stack
    mCanvasState.restore();
    mLayerStack.pop_back();
}

// TODO: test rejection at defer time, where the bounds become empty
void OpReorderer::deferBeginLayerOp(const BeginLayerOp& op) {
    uint32_t layerWidth = (uint32_t) op.unmappedBounds.getWidth();
    uint32_t layerHeight = (uint32_t) op.unmappedBounds.getHeight();

    auto previous = mCanvasState.currentSnapshot();
    Vector3 lightCenter = previous->getRelativeLightCenter();

    // Combine all transforms used to present saveLayer content:
    // parent content transform * canvas transform * bounds offset
    Matrix4 contentTransform(*previous->transform);
    contentTransform.multiply(op.localMatrix);
    contentTransform.translate(op.unmappedBounds.left, op.unmappedBounds.top);

    Matrix4 inverseContentTransform;
    inverseContentTransform.loadInverse(contentTransform);

    // map the light center into layer-relative space
    inverseContentTransform.mapPoint3d(lightCenter);

    // Clip bounds of temporary layer to parent's clip rect, so:
    Rect saveLayerBounds(layerWidth, layerHeight);
    // 1) transform Rect(width, height) into parent's space
    //    note: left/top offsets put in contentTransform above
    contentTransform.mapRect(saveLayerBounds);
    // 2) intersect with parent's clip
    saveLayerBounds.doIntersect(previous->getRenderTargetClip());
    // 3) and transform back
    inverseContentTransform.mapRect(saveLayerBounds);
    saveLayerBounds.doIntersect(Rect(layerWidth, layerHeight));
    saveLayerBounds.roundOut();

    // if bounds are reduced, will clip the layer's area by reducing required bounds...
    layerWidth = saveLayerBounds.getWidth();
    layerHeight = saveLayerBounds.getHeight();
    // ...and shifting drawing content to account for left/top side clipping
    float contentTranslateX = -saveLayerBounds.left;
    float contentTranslateY = -saveLayerBounds.top;

    saveForLayer(layerWidth, layerHeight,
            contentTranslateX, contentTranslateY,
            Rect(layerWidth, layerHeight),
            lightCenter,
            &op, nullptr);
}
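// Worked example (illustrative values, assuming an axis-aligned content transform): for a 100x100
// saveLayer whose parent clip only exposes layer-space x >= 30, saveLayerBounds becomes
// (30, 0, 100, 100), so layerWidth shrinks to 70 and contentTranslateX becomes -30; the layer's
// content is shifted left and only the visible 70x100 region is allocated and repainted.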

void OpReorderer::deferEndLayerOp(const EndLayerOp& /* ignored */) {
    const BeginLayerOp& beginLayerOp = *currentLayer().beginLayerOp;
    int finishedLayerIndex = mLayerStack.back();

    restoreForLayer();

    // record the draw operation into the previous layer's list of draw commands
    // uses state from the associated beginLayerOp, since it has all the state needed for drawing
    LayerOp* drawLayerOp = new (mAllocator) LayerOp(
            beginLayerOp.unmappedBounds,
            beginLayerOp.localMatrix,
            beginLayerOp.localClipRect,
            beginLayerOp.paint,
            &mLayerReorderers[finishedLayerIndex].offscreenBuffer);
    BakedOpState* bakedOpState = tryBakeOpState(*drawLayerOp);

    if (bakedOpState) {
        // Layer will be drawn into parent layer (which is now current, since we popped mLayerStack)
        currentLayer().deferUnmergeableOp(mAllocator, bakedOpState, OpBatchType::Bitmap);
    } else {
        // Layer won't be drawn - delete its drawing batches to prevent it from doing any work
        mLayerReorderers[finishedLayerIndex].clear();
        return;
    }
}

void OpReorderer::deferLayerOp(const LayerOp& op) {
    LOG_ALWAYS_FATAL("unsupported");
}

void OpReorderer::deferShadowOp(const ShadowOp& op) {
    LOG_ALWAYS_FATAL("unsupported");
}

} // namespace uirenderer
} // namespace android