blob: 80efaed44d1302bcdc03aa045ab97ae76d3b8315 [file] [log] [blame]
Chris Craikb565df12015-10-05 13:00:52 -07001/*
2 * Copyright (C) 2015 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "OpReorderer.h"
18
Chris Craik0b7e8242015-10-28 16:50:44 -070019#include "LayerUpdateQueue.h"
Chris Craik161f54b2015-11-05 11:08:52 -080020#include "RenderNode.h"
21#include "utils/FatVector.h"
22#include "utils/PaintUtils.h"
Chris Craikb565df12015-10-05 13:00:52 -070023
Chris Craik161f54b2015-11-05 11:08:52 -080024#include <SkCanvas.h>
Chris Craikd3daa312015-11-06 10:59:56 -080025#include <SkPathOps.h>
Chris Craik161f54b2015-11-05 11:08:52 -080026#include <utils/Trace.h>
27#include <utils/TypeHelpers.h>
Chris Craikb565df12015-10-05 13:00:52 -070028
29namespace android {
30namespace uirenderer {
31
32class BatchBase {
33
34public:
35 BatchBase(batchid_t batchId, BakedOpState* op, bool merging)
36 : mBatchId(batchId)
37 , mMerging(merging) {
38 mBounds = op->computedState.clippedBounds;
39 mOps.push_back(op);
40 }
41
42 bool intersects(const Rect& rect) const {
43 if (!rect.intersects(mBounds)) return false;
44
45 for (const BakedOpState* op : mOps) {
46 if (rect.intersects(op->computedState.clippedBounds)) {
47 return true;
48 }
49 }
50 return false;
51 }
52
53 batchid_t getBatchId() const { return mBatchId; }
54 bool isMerging() const { return mMerging; }
55
56 const std::vector<BakedOpState*>& getOps() const { return mOps; }
57
58 void dump() const {
Chris Craik6fe991e52015-10-20 09:39:42 -070059 ALOGD(" Batch %p, id %d, merging %d, count %d, bounds " RECT_STRING,
60 this, mBatchId, mMerging, mOps.size(), RECT_ARGS(mBounds));
Chris Craikb565df12015-10-05 13:00:52 -070061 }
62protected:
63 batchid_t mBatchId;
64 Rect mBounds;
65 std::vector<BakedOpState*> mOps;
66 bool mMerging;
67};
68
69class OpBatch : public BatchBase {
70public:
71 static void* operator new(size_t size, LinearAllocator& allocator) {
72 return allocator.alloc(size);
73 }
74
75 OpBatch(batchid_t batchId, BakedOpState* op)
76 : BatchBase(batchId, op, false) {
77 }
78
79 void batchOp(BakedOpState* op) {
80 mBounds.unionWith(op->computedState.clippedBounds);
81 mOps.push_back(op);
82 }
83};
84
/**
 * Merging batch: all contained ops may be issued together as a single
 * multiDraw, provided their baked states stay compatible (see canMergeWith()).
 * Tracks the accumulated clip rect / clip-side flags of the merged set.
 */
class MergingOpBatch : public BatchBase {
public:
    // Placement-allocate from the frame's LinearAllocator; freed wholesale, no delete
    static void* operator new(size_t size, LinearAllocator& allocator) {
        return allocator.alloc(size);
    }

    MergingOpBatch(batchid_t batchId, BakedOpState* op)
            : BatchBase(batchId, op, true) {
    }

    /*
     * Helper for determining if a new op can merge with a MergingDrawBatch based on their bounds
     * and clip side flags. Positive bounds delta means new bounds fit in old.
     */
    static inline bool checkSide(const int currentFlags, const int newFlags, const int side,
            float boundsDelta) {
        bool currentClipExists = currentFlags & side;
        bool newClipExists = newFlags & side;

        // if current is clipped, we must be able to fit new bounds in current
        if (boundsDelta > 0 && currentClipExists) return false;

        // if new is clipped, we must be able to fit current bounds in new
        if (boundsDelta < 0 && newClipExists) return false;

        return true;
    }

    // True if the paint carries none of the attributes that affect merged drawing
    static bool paintIsDefault(const SkPaint& paint) {
        return paint.getAlpha() == 255
                && paint.getColorFilter() == nullptr
                && paint.getShader() == nullptr;
    }

    // Compares only the attributes relevant to merged drawing; color filter and
    // shader are compared by pointer identity, not value
    static bool paintsAreEquivalent(const SkPaint& a, const SkPaint& b) {
        return a.getAlpha() == b.getAlpha()
                && a.getColorFilter() == b.getColorFilter()
                && a.getShader() == b.getShader();
    }

    /*
     * Checks if a (mergeable) op can be merged into this batch
     *
     * If true, the op's multiDraw must be guaranteed to handle both ops simultaneously, so it is
     * important to consider all paint attributes used in the draw calls in deciding both a) if an
     * op tries to merge at all, and b) if the op can merge with another set of ops
     *
     * False positives can lead to information from the paints of subsequent merged operations being
     * dropped, so we make simplifying qualifications on the ops that can merge, per op type.
     */
    bool canMergeWith(BakedOpState* op) const {
        bool isTextBatch = getBatchId() == OpBatchType::Text
                || getBatchId() == OpBatchType::ColorText;

        // Overlapping other operations is only allowed for text without shadow. For other ops,
        // multiDraw isn't guaranteed to overdraw correctly
        if (!isTextBatch || PaintUtils::hasTextShadow(op->op->paint)) {
            if (intersects(op->computedState.clippedBounds)) return false;
        }

        const BakedOpState* lhs = op;
        const BakedOpState* rhs = mOps[0];

        if (!MathUtils::areEqual(lhs->alpha, rhs->alpha)) return false;

        // Identical round rect clip state means both ops will clip in the same way, or not at all.
        // As the state objects are const, we can compare their pointers to determine mergeability
        if (lhs->roundRectClipState != rhs->roundRectClipState) return false;
        if (lhs->projectionPathMask != rhs->projectionPathMask) return false;

        /* Clipping compatibility check
         *
         * Exploits the fact that if a op or batch is clipped on a side, its bounds will equal its
         * clip for that side.
         */
        const int currentFlags = mClipSideFlags;
        const int newFlags = op->computedState.clipSideFlags;
        if (currentFlags != OpClipSideFlags::None || newFlags != OpClipSideFlags::None) {
            const Rect& opBounds = op->computedState.clippedBounds;
            float boundsDelta = mBounds.left - opBounds.left;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Left, boundsDelta)) return false;
            boundsDelta = mBounds.top - opBounds.top;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Top, boundsDelta)) return false;

            // right and bottom delta calculation reversed to account for direction
            boundsDelta = opBounds.right - mBounds.right;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Right, boundsDelta)) return false;
            boundsDelta = opBounds.bottom - mBounds.bottom;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Bottom, boundsDelta)) return false;
        }

        const SkPaint* newPaint = op->op->paint;
        const SkPaint* oldPaint = mOps[0]->op->paint;

        if (newPaint == oldPaint) {
            // if paints are equal, then modifiers + paint attribs don't need to be compared
            return true;
        } else if (newPaint && !oldPaint) {
            return paintIsDefault(*newPaint);
        } else if (!newPaint && oldPaint) {
            return paintIsDefault(*oldPaint);
        }
        return paintsAreEquivalent(*newPaint, *oldPaint);
    }

    // Absorb op into the batch: extend bounds, accumulate clip side flags, and
    // widen the merged clip on each side the new op is clipped on.
    void mergeOp(BakedOpState* op) {
        mBounds.unionWith(op->computedState.clippedBounds);
        mOps.push_back(op);

        const int newClipSideFlags = op->computedState.clipSideFlags;
        mClipSideFlags |= newClipSideFlags;

        const Rect& opClip = op->computedState.clipRect;
        if (newClipSideFlags & OpClipSideFlags::Left) mClipRect.left = opClip.left;
        if (newClipSideFlags & OpClipSideFlags::Top) mClipRect.top = opClip.top;
        if (newClipSideFlags & OpClipSideFlags::Right) mClipRect.right = opClip.right;
        if (newClipSideFlags & OpClipSideFlags::Bottom) mClipRect.bottom = opClip.bottom;
    }

private:
    // union of clip side flags of all merged ops
    int mClipSideFlags = 0;
    // merged clip rect — only the sides flagged in mClipSideFlags are meaningful
    Rect mClipRect;
};
208
// A LayerReorderer for a render-node-backed layer pulls its OffscreenBuffer from
// the node; for an unbacked (save-layer) target, offscreenBuffer starts null.
OpReorderer::LayerReorderer::LayerReorderer(uint32_t width, uint32_t height,
        const BeginLayerOp* beginLayerOp, RenderNode* renderNode)
        : width(width)
        , height(height)
        , offscreenBuffer(renderNode ? renderNode->getLayer() : nullptr)
        , beginLayerOp(beginLayerOp)
        , renderNode(renderNode) {}
216
// iterate back toward target to see if anything drawn since should overlap the new op
// if no target, merging ops still iterate to find similar batch to insert after
void OpReorderer::LayerReorderer::locateInsertIndex(int batchId, const Rect& clippedBounds,
        BatchBase** targetBatch, size_t* insertBatchIndex) const {
    for (int i = mBatches.size() - 1; i >= 0; i--) {
        BatchBase* overBatch = mBatches[i];

        // reached the candidate batch with nothing in between overlapping the op:
        // safe to reuse *targetBatch as-is
        if (overBatch == *targetBatch) break;

        // TODO: also consider shader shared between batch types
        if (batchId == overBatch->getBatchId()) {
            // prefer inserting a new batch directly after the most recent batch
            // of the same id (likely shares pipeline state)
            *insertBatchIndex = i + 1;
            if (!*targetBatch) break; // found insert position, quit
        }

        if (overBatch->intersects(clippedBounds)) {
            // NOTE: it may be possible to optimize for special cases where two operations
            // of the same batch/paint could swap order, such as with a non-mergeable
            // (clipped) and a mergeable text operation
            *targetBatch = nullptr;
            break;
        }
    }
}
241
242void OpReorderer::LayerReorderer::deferUnmergeableOp(LinearAllocator& allocator,
243 BakedOpState* op, batchid_t batchId) {
244 OpBatch* targetBatch = mBatchLookup[batchId];
245
246 size_t insertBatchIndex = mBatches.size();
247 if (targetBatch) {
248 locateInsertIndex(batchId, op->computedState.clippedBounds,
249 (BatchBase**)(&targetBatch), &insertBatchIndex);
250 }
251
252 if (targetBatch) {
253 targetBatch->batchOp(op);
254 } else {
255 // new non-merging batch
256 targetBatch = new (allocator) OpBatch(batchId, op);
257 mBatchLookup[batchId] = targetBatch;
258 mBatches.insert(mBatches.begin() + insertBatchIndex, targetBatch);
259 }
260}
261
262// insertion point of a new batch, will hopefully be immediately after similar batch
263// (generally, should be similar shader)
264void OpReorderer::LayerReorderer::deferMergeableOp(LinearAllocator& allocator,
265 BakedOpState* op, batchid_t batchId, mergeid_t mergeId) {
266 MergingOpBatch* targetBatch = nullptr;
267
268 // Try to merge with any existing batch with same mergeId
269 auto getResult = mMergingBatchLookup[batchId].find(mergeId);
270 if (getResult != mMergingBatchLookup[batchId].end()) {
271 targetBatch = getResult->second;
272 if (!targetBatch->canMergeWith(op)) {
273 targetBatch = nullptr;
274 }
275 }
276
277 size_t insertBatchIndex = mBatches.size();
278 locateInsertIndex(batchId, op->computedState.clippedBounds,
279 (BatchBase**)(&targetBatch), &insertBatchIndex);
280
281 if (targetBatch) {
282 targetBatch->mergeOp(op);
283 } else {
284 // new merging batch
285 targetBatch = new (allocator) MergingOpBatch(batchId, op);
286 mMergingBatchLookup[batchId].insert(std::make_pair(mergeId, targetBatch));
287
288 mBatches.insert(mBatches.begin() + insertBatchIndex, targetBatch);
289 }
290}
291
Chris Craik5854b342015-10-26 15:49:56 -0700292void OpReorderer::LayerReorderer::replayBakedOpsImpl(void* arg, BakedOpDispatcher* receivers) const {
293 ATRACE_NAME("flush drawing commands");
Chris Craik6fe991e52015-10-20 09:39:42 -0700294 for (const BatchBase* batch : mBatches) {
295 // TODO: different behavior based on batch->isMerging()
296 for (const BakedOpState* op : batch->getOps()) {
297 receivers[op->op->opId](arg, *op->op, *op);
298 }
299 }
300}
301
// Debug dump of this layer's identity plus each of its batches.
void OpReorderer::LayerReorderer::dump() const {
    // blo = BeginLayerOp, rn = backing RenderNode (either may be null)
    ALOGD("LayerReorderer %p, %ux%u buffer %p, blo %p, rn %p",
            this, width, height, offscreenBuffer, beginLayerOp, renderNode);
    for (const BatchBase* batch : mBatches) {
        batch->dump();
    }
}
Chris Craikb565df12015-10-05 13:00:52 -0700309
/**
 * Defers a full frame: first every enqueued layer update, then the nodes drawn
 * into Fbo0. Layer index 0 is reserved for Fbo0 and pushed before any deferral.
 */
OpReorderer::OpReorderer(const LayerUpdateQueue& layers, const SkRect& clip,
        uint32_t viewportWidth, uint32_t viewportHeight,
        const std::vector< sp<RenderNode> >& nodes)
        : mCanvasState(*this) {
    ATRACE_NAME("prepare drawing commands");
    mLayerReorderers.emplace_back(viewportWidth, viewportHeight);
    mLayerStack.push_back(0);

    mCanvasState.initializeSaveStack(viewportWidth, viewportHeight,
            clip.fLeft, clip.fTop, clip.fRight, clip.fBottom,
            Vector3());

    // Render all layers to be updated, in order. Defer in reverse order, so that they'll be
    // updated in the order they're passed in (mLayerReorderers are issued to Renderer in reverse)
    for (int i = layers.entries().size() - 1; i >= 0; i--) {
        RenderNode* layerNode = layers.entries()[i].renderNode;
        const Rect& layerDamage = layers.entries()[i].damage;

        // clip the layer's deferral to just the damaged region
        saveForLayer(layerNode->getWidth(), layerNode->getHeight(), nullptr, layerNode);
        mCanvasState.writableSnapshot()->setClip(
                layerDamage.left, layerDamage.top, layerDamage.right, layerDamage.bottom);

        if (layerNode->getDisplayList()) {
            deferImpl(*(layerNode->getDisplayList()));
        }
        restoreForLayer();
    }

    // Defer Fbo0
    for (const sp<RenderNode>& node : nodes) {
        if (node->nothingToDraw()) continue;

        int count = mCanvasState.save(SkCanvas::kClip_SaveFlag | SkCanvas::kMatrix_SaveFlag);
        deferNodePropsAndOps(*node);
        mCanvasState.restoreToCount(count);
    }
}
347
// Simpler constructor (used by tests/direct deferral): a single root layer with
// a full-viewport clip, deferring one display list.
OpReorderer::OpReorderer(int viewportWidth, int viewportHeight, const DisplayList& displayList)
        : mCanvasState(*this) {
    ATRACE_NAME("prepare drawing commands");
    // layer index 0 is the root (Fbo0) target
    mLayerReorderers.emplace_back(viewportWidth, viewportHeight);
    mLayerStack.push_back(0);

    mCanvasState.initializeSaveStack(viewportWidth, viewportHeight,
            0, 0, viewportWidth, viewportHeight, Vector3());
    deferImpl(displayList);
}
358
// CanvasStateClient callback — viewport is fixed at construction, nothing to do
void OpReorderer::onViewportInitialized() {}
360
// CanvasStateClient callback — no bookkeeping needed on snapshot restore
void OpReorderer::onSnapshotRestored(const Snapshot& removed, const Snapshot& restored) {}
362
// Applies the node's view properties to the canvas state, then defers either a
// draw of its HW layer or its display list contents.
void OpReorderer::deferNodePropsAndOps(RenderNode& node) {
    // applyViewProperties returns false when the node is rejected (e.g. culled)
    if (node.applyViewProperties(mCanvasState, mAllocator)) {
        // not rejected so render
        if (node.getLayer()) {
            // HW layer
            LayerOp* drawLayerOp = new (mAllocator) LayerOp(node);
            BakedOpState* bakedOpState = tryBakeOpState(*drawLayerOp);
            if (bakedOpState) {
                // Layer will be drawn into parent layer (which is now current, since we popped mLayerStack)
                currentLayer().deferUnmergeableOp(mAllocator, bakedOpState, OpBatchType::Bitmap);
            }
        } else {
            deferImpl(*(node.getDisplayList()));
        }
    }
}
379
typedef key_value_pair_t<float, const RenderNodeOp*> ZRenderNodeOpPair;

/**
 * Collects the chunk's children with non-zero Z into zTranslatedNodes, sorted by
 * Z, and flags each collected child op to skip in-order drawing (it will be drawn
 * by defer3dChildren instead).
 */
template <typename V>
static void buildZSortedChildList(V* zTranslatedNodes,
        const DisplayList& displayList, const DisplayList::Chunk& chunk) {
    if (chunk.beginChildIndex == chunk.endChildIndex) return;

    for (size_t i = chunk.beginChildIndex; i < chunk.endChildIndex; i++) {
        RenderNodeOp* childOp = displayList.getChildren()[i];
        RenderNode* child = childOp->renderNode;
        float childZ = child->properties().getZ();

        if (!MathUtils::isZero(childZ) && chunk.reorderChildren) {
            zTranslatedNodes->push_back(ZRenderNodeOpPair(childZ, childOp));
            childOp->skipInOrderDraw = true;
        } else if (!child->properties().getProjectBackwards()) {
            // regular, in order drawing DisplayList
            // (projected-backwards children keep their previous flag value)
            childOp->skipInOrderDraw = false;
        }
    }

    // Z sort any 3d children (stable-ness makes z compare fall back to standard drawing order)
    std::stable_sort(zTranslatedNodes->begin(), zTranslatedNodes->end());
}
404
// Returns the index of the first entry with a non-negative Z key, or size()
// when every entry is negative (input is already Z-sorted by the caller).
template <typename V>
static size_t findNonNegativeIndex(const V& zTranslatedNodes) {
    const size_t count = zTranslatedNodes.size();
    size_t index = 0;
    while (index < count && zTranslatedNodes[index].key < 0.0f) {
        ++index;
    }
    return index;
}
412
// Defers one side (negative or positive Z) of the Z-sorted child list, weaving
// shadow deferral in with caster deferral so shadows sit under their casters.
template <typename V>
void OpReorderer::defer3dChildren(ChildrenSelectMode mode, const V& zTranslatedNodes) {
    const int size = zTranslatedNodes.size();
    if (size == 0
            || (mode == ChildrenSelectMode::Negative&& zTranslatedNodes[0].key > 0.0f)
            || (mode == ChildrenSelectMode::Positive && zTranslatedNodes[size - 1].key < 0.0f)) {
        // no 3d children to draw
        return;
    }

    /**
     * Draw shadows and (potential) casters mostly in order, but allow the shadows of casters
     * with very similar Z heights to draw together.
     *
     * This way, if Views A & B have the same Z height and are both casting shadows, the shadows are
     * underneath both, and neither's shadow is drawn on top of the other.
     */
    const size_t nonNegativeIndex = findNonNegativeIndex(zTranslatedNodes);
    size_t drawIndex, shadowIndex, endIndex;
    if (mode == ChildrenSelectMode::Negative) {
        drawIndex = 0;
        endIndex = nonNegativeIndex;
        shadowIndex = endIndex; // draw no shadows
    } else {
        drawIndex = nonNegativeIndex;
        endIndex = size;
        shadowIndex = drawIndex; // potentially draw shadow for each pos Z child
    }

    float lastCasterZ = 0.0f;
    while (shadowIndex < endIndex || drawIndex < endIndex) {
        if (shadowIndex < endIndex) {
            const RenderNodeOp* casterNodeOp = zTranslatedNodes[shadowIndex].value;
            const float casterZ = zTranslatedNodes[shadowIndex].key;
            // attempt to render the shadow if the caster about to be drawn is its caster,
            // OR if its caster's Z value is similar to the previous potential caster
            if (shadowIndex == drawIndex || casterZ - lastCasterZ < 0.1f) {
                deferShadow(*casterNodeOp);

                lastCasterZ = casterZ; // must do this even if current caster not casting a shadow
                shadowIndex++;
                continue;
            }
        }

        const RenderNodeOp* childOp = zTranslatedNodes[drawIndex].value;
        deferRenderNodeOp(*childOp);
        drawIndex++;
    }
}
463
// Defers a ShadowOp for the given caster node, if it can cast a visible shadow.
// The shadow-casting path is the outline, optionally intersected with the reveal
// clip and/or the clip bounds.
void OpReorderer::deferShadow(const RenderNodeOp& casterNodeOp) {
    auto& node = *casterNodeOp.renderNode;
    auto& properties = node.properties();

    if (properties.getAlpha() <= 0.0f
            || properties.getOutline().getAlpha() <= 0.0f
            || !properties.getOutline().getPath()
            || properties.getScaleX() == 0
            || properties.getScaleY() == 0) {
        // no shadow to draw
        return;
    }

    const SkPath* casterOutlinePath = properties.getOutline().getPath();
    const SkPath* revealClipPath = properties.getRevealClip().getPath();
    if (revealClipPath && revealClipPath->isEmpty()) return;

    float casterAlpha = properties.getAlpha() * properties.getOutline().getAlpha();

    // holds temporary SkPath to store the result of intersections
    SkPath* frameAllocatedPath = nullptr;
    const SkPath* casterPath = casterOutlinePath;

    // intersect the shadow-casting path with the reveal, if present
    if (revealClipPath) {
        frameAllocatedPath = createFrameAllocatedPath();

        // SkPathOps boolean intersection of caster outline and reveal clip
        Op(*casterPath, *revealClipPath, kIntersect_SkPathOp, frameAllocatedPath);
        casterPath = frameAllocatedPath;
    }

    // intersect the shadow-casting path with the clipBounds, if present
    if (properties.getClippingFlags() & CLIP_TO_CLIP_BOUNDS) {
        if (!frameAllocatedPath) {
            frameAllocatedPath = createFrameAllocatedPath();
        }
        Rect clipBounds;
        properties.getClippingRectForFlags(CLIP_TO_CLIP_BOUNDS, &clipBounds);
        SkPath clipBoundsPath;
        clipBoundsPath.addRect(clipBounds.left, clipBounds.top,
                clipBounds.right, clipBounds.bottom);

        Op(*casterPath, clipBoundsPath, kIntersect_SkPathOp, frameAllocatedPath);
        casterPath = frameAllocatedPath;
    }

    ShadowOp* shadowOp = new (mAllocator) ShadowOp(casterNodeOp, casterAlpha, casterPath,
            mCanvasState.getLocalClipBounds());
    BakedOpState* bakedOpState = BakedOpState::tryShadowOpConstruct(
            mAllocator, *mCanvasState.currentSnapshot(), shadowOp);
    if (CC_LIKELY(bakedOpState)) {
        currentLayer().deferUnmergeableOp(mAllocator, bakedOpState, OpBatchType::Shadow);
    }
}
Chris Craikd3daa312015-11-06 10:59:56 -0800518
Chris Craikb565df12015-10-05 13:00:52 -0700519/**
520 * Used to define a list of lambdas referencing private OpReorderer::onXXXXOp() methods.
521 *
522 * This allows opIds embedded in the RecordedOps to be used for dispatching to these lambdas. E.g. a
523 * BitmapOp op then would be dispatched to OpReorderer::onBitmapOp(const BitmapOp&)
524 */
#define OP_RECEIVER(Type) \
        [](OpReorderer& reorderer, const RecordedOp& op) { reorderer.on##Type(static_cast<const Type&>(op)); },
// Walks every chunk of the display list: Z-negative children first, then the
// chunk's recorded ops (dispatched by opId), then Z-positive children.
void OpReorderer::deferImpl(const DisplayList& displayList) {
    // dispatch table indexed by RecordedOp::opId; ordering defined by MAP_OPS
    static std::function<void(OpReorderer& reorderer, const RecordedOp&)> receivers[] = {
        MAP_OPS(OP_RECEIVER)
    };
    for (const DisplayList::Chunk& chunk : displayList.getChunks()) {
        FatVector<ZRenderNodeOpPair, 16> zTranslatedNodes;
        buildZSortedChildList(&zTranslatedNodes, displayList, chunk);

        defer3dChildren(ChildrenSelectMode::Negative, zTranslatedNodes);
        for (size_t opIndex = chunk.beginOpIndex; opIndex < chunk.endOpIndex; opIndex++) {
            const RecordedOp* op = displayList.getOps()[opIndex];
            receivers[op->opId](*this, *op);
        }
        defer3dChildren(ChildrenSelectMode::Positive, zTranslatedNodes);
    }
}
543
// Applies the RenderNodeOp's recorded matrix/clip under a save/restore pair,
// then defers the node's own properties and ops.
void OpReorderer::deferRenderNodeOp(const RenderNodeOp& op) {
    if (op.renderNode->nothingToDraw()) return;
    int count = mCanvasState.save(SkCanvas::kClip_SaveFlag | SkCanvas::kMatrix_SaveFlag);

    // apply state from RecordedOp
    mCanvasState.concatMatrix(op.localMatrix);
    mCanvasState.clipRect(op.localClipRect.left, op.localClipRect.top,
            op.localClipRect.right, op.localClipRect.bottom, SkRegion::kIntersect_Op);

    // then apply state from node properties, and defer ops
    deferNodePropsAndOps(*op.renderNode);

    mCanvasState.restoreToCount(count);
}
558
Chris Craik161f54b2015-11-05 11:08:52 -0800559void OpReorderer::onRenderNodeOp(const RenderNodeOp& op) {
560 if (!op.skipInOrderDraw) {
561 deferRenderNodeOp(op);
562 }
563}
564
Chris Craikb565df12015-10-05 13:00:52 -0700565static batchid_t tessellatedBatchId(const SkPaint& paint) {
566 return paint.getPathEffect()
567 ? OpBatchType::AlphaMaskTexture
568 : (paint.isAntiAlias() ? OpBatchType::AlphaVertices : OpBatchType::Vertices);
569}
570
571void OpReorderer::onBitmapOp(const BitmapOp& op) {
Chris Craik6fe991e52015-10-20 09:39:42 -0700572 BakedOpState* bakedStateOp = tryBakeOpState(op);
Chris Craikb565df12015-10-05 13:00:52 -0700573 if (!bakedStateOp) return; // quick rejected
574
575 mergeid_t mergeId = (mergeid_t) op.bitmap->getGenerationID();
576 // TODO: AssetAtlas
Chris Craik6fe991e52015-10-20 09:39:42 -0700577 currentLayer().deferMergeableOp(mAllocator, bakedStateOp, OpBatchType::Bitmap, mergeId);
Chris Craikb565df12015-10-05 13:00:52 -0700578}
579
580void OpReorderer::onRectOp(const RectOp& op) {
Chris Craik6fe991e52015-10-20 09:39:42 -0700581 BakedOpState* bakedStateOp = tryBakeOpState(op);
Chris Craikb565df12015-10-05 13:00:52 -0700582 if (!bakedStateOp) return; // quick rejected
Chris Craik6fe991e52015-10-20 09:39:42 -0700583 currentLayer().deferUnmergeableOp(mAllocator, bakedStateOp, tessellatedBatchId(*op.paint));
Chris Craikb565df12015-10-05 13:00:52 -0700584}
585
586void OpReorderer::onSimpleRectsOp(const SimpleRectsOp& op) {
Chris Craik6fe991e52015-10-20 09:39:42 -0700587 BakedOpState* bakedStateOp = tryBakeOpState(op);
Chris Craikb565df12015-10-05 13:00:52 -0700588 if (!bakedStateOp) return; // quick rejected
Chris Craik6fe991e52015-10-20 09:39:42 -0700589 currentLayer().deferUnmergeableOp(mAllocator, bakedStateOp, OpBatchType::Vertices);
Chris Craikb565df12015-10-05 13:00:52 -0700590}
591
/**
 * Begins deferring into a new layer: resets the snapshot to an identity
 * transform and layer-sized viewport, then creates the LayerReorderer and
 * pushes its index so subsequent ops target it. Balanced by restoreForLayer().
 */
void OpReorderer::saveForLayer(uint32_t layerWidth, uint32_t layerHeight,
        const BeginLayerOp* beginLayerOp, RenderNode* renderNode) {

    mCanvasState.save(SkCanvas::kClip_SaveFlag | SkCanvas::kMatrix_SaveFlag);
    mCanvasState.writableSnapshot()->transform->loadIdentity();
    mCanvasState.writableSnapshot()->initializeViewport(layerWidth, layerHeight);
    mCanvasState.writableSnapshot()->roundRectClipState = nullptr;

    // create a new layer, and push its index on the stack
    mLayerStack.push_back(mLayerReorderers.size());
    mLayerReorderers.emplace_back(layerWidth, layerHeight, beginLayerOp, renderNode);
}
604
// Ends deferral into the current layer; the parent layer becomes current again.
void OpReorderer::restoreForLayer() {
    // restore canvas, and pop finished layer off of the stack
    mCanvasState.restore();
    mLayerStack.pop_back();
}
610
611// TODO: test rejection at defer time, where the bounds become empty
612void OpReorderer::onBeginLayerOp(const BeginLayerOp& op) {
613 const uint32_t layerWidth = (uint32_t) op.unmappedBounds.getWidth();
614 const uint32_t layerHeight = (uint32_t) op.unmappedBounds.getHeight();
615 saveForLayer(layerWidth, layerHeight, &op, nullptr);
Chris Craik6fe991e52015-10-20 09:39:42 -0700616}
Chris Craikb565df12015-10-05 13:00:52 -0700617
void OpReorderer::onEndLayerOp(const EndLayerOp& /* ignored */) {
    // capture the finishing layer's BeginLayerOp and index *before* popping it
    const BeginLayerOp& beginLayerOp = *currentLayer().beginLayerOp;
    int finishedLayerIndex = mLayerStack.back();

    restoreForLayer();

    // record the draw operation into the previous layer's list of draw commands
    // uses state from the associated beginLayerOp, since it has all the state needed for drawing
    LayerOp* drawLayerOp = new (mAllocator) LayerOp(
            beginLayerOp.unmappedBounds,
            beginLayerOp.localMatrix,
            beginLayerOp.localClipRect,
            beginLayerOp.paint,
            &mLayerReorderers[finishedLayerIndex].offscreenBuffer);
    BakedOpState* bakedOpState = tryBakeOpState(*drawLayerOp);

    if (bakedOpState) {
        // Layer will be drawn into parent layer (which is now current, since we popped mLayerStack)
        currentLayer().deferUnmergeableOp(mAllocator, bakedOpState, OpBatchType::Bitmap);
    } else {
        // Layer won't be drawn - delete its drawing batches to prevent it from doing any work
        mLayerReorderers[finishedLayerIndex].clear();
        return;
    }
}
643
// LayerOps are only synthesized during deferral (see onEndLayerOp /
// deferNodePropsAndOps), never recorded, so reaching this is a logic error.
void OpReorderer::onLayerOp(const LayerOp& op) {
    LOG_ALWAYS_FATAL("unsupported");
}
647
// ShadowOps are only synthesized during deferral (see deferShadow), never
// recorded, so reaching this is a logic error.
void OpReorderer::onShadowOp(const ShadowOp& op) {
    LOG_ALWAYS_FATAL("unsupported");
}
651
Chris Craikb565df12015-10-05 13:00:52 -0700652} // namespace uirenderer
653} // namespace android