/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrReducedClip_DEFINED
#define GrReducedClip_DEFINED

#include "GrFragmentProcessor.h"
#include "GrWindowRectangles.h"
#include "SkClipStack.h"
#include "SkTLList.h"

class GrContext;
class GrCoverageCountingPathRenderer;
class GrRenderTargetContext;

/**
 * This class takes a clip stack and produces a reduced set of elements that are equivalent to
 * applying that full stack within a specified query rectangle.
 *
 * The reduction happens during construction. Afterward, the caller interrogates the accessors
 * below (initialState(), scissor(), windowRectangles(), maskElements(), the analytic FPs) to
 * find out how to implement the clip for a draw restricted to queryBounds.
 */
class SK_API GrReducedClip {
public:
    using Element = SkClipStack::Element;
    using ElementList = SkTLList<SkClipStack::Element, 16>;

    /**
     * Performs the reduction of 'SkClipStack' as it applies within 'queryBounds'.
     *
     * @param caps                 shader capabilities, consulted when deciding how elements can
     *                             be implemented (e.g. as analytic FPs).
     * @param maxWindowRectangles  cap on how many entries may be added to windowRectangles().
     * @param maxAnalyticFPs       cap on how many clip elements may become analytic FPs.
     * @param GrCoverageCountingPathRenderer  if non-null, clip paths may be deferred to CCPR
     *                             (collected in fCCPRClipPaths until an opList ID is known).
     */
    GrReducedClip(const SkClipStack&, const SkRect& queryBounds, const GrShaderCaps* caps,
                  int maxWindowRectangles = 0, int maxAnalyticFPs = 0,
                  GrCoverageCountingPathRenderer* = nullptr);

    /**
     * The clip's starting coverage state within the query bounds, before any of the reduced
     * elements are applied: everything in (kAllIn) or everything out (kAllOut).
     */
    enum class InitialState : bool {
        kAllIn,
        kAllOut
    };

    InitialState initialState() const { return fInitialState; }

    /**
     * If hasScissor() is true, the clip mask is not valid outside this rect and the caller must
     * enforce this scissor during draw.
     */
    const SkIRect& scissor() const { SkASSERT(fHasScissor); return fScissor; }
    int left() const { return this->scissor().left(); }
    int top() const { return this->scissor().top(); }
    int width() const { return this->scissor().width(); }
    int height() const { return this->scissor().height(); }

    /**
     * Indicates whether scissor() is defined. It will always be defined if the maskElements() are
     * nonempty.
     */
    bool hasScissor() const { return fHasScissor; }

    /**
     * If nonempty, the clip mask is not valid inside these windows and the caller must clip them
     * out using the window rectangles GPU extension.
     */
    const GrWindowRectangles& windowRectangles() const { return fWindowRects; }

    /**
     * An ordered list of clip elements that could not be skipped or implemented by other means. If
     * nonempty, the caller must create an alpha and/or stencil mask for these elements and apply it
     * during draw.
     */
    const ElementList& maskElements() const { return fMaskElements; }

    /**
     * If maskElements() are nonempty, uniquely identifies the region of the clip mask that falls
     * inside of scissor().
     *
     * NOTE: since clip elements might fall outside the query bounds, different regions of the same
     * clip stack might have more or less restrictive IDs.
     *
     * FIXME: this prevents us from reusing a sub-rect of a perfectly good mask when that rect has
     * been assigned a less restrictive ID.
     */
    uint32_t maskGenID() const { SkASSERT(!fMaskElements.isEmpty()); return fMaskGenID; }

    /**
     * Indicates whether antialiasing is required to process any of the mask elements.
     */
    bool maskRequiresAA() const { SkASSERT(!fMaskElements.isEmpty()); return fMaskRequiresAA; }

    // Renders the mask elements into an alpha coverage mask / the stencil buffer on the given
    // context. NOTE(review): presumably the bool return signals success vs. failure — confirm
    // against the .cpp implementation.
    bool drawAlphaClipMask(GrRenderTargetContext*) const;
    bool drawStencilClipMask(GrContext*, GrRenderTargetContext*) const;

    // Total number of analytic FPs this clip will produce: the already-built FPs plus the CCPR
    // clip paths that will be converted to FPs in finishAndDetachAnalyticFPs().
    int numAnalyticFPs() const { return fAnalyticFPs.count() + fCCPRClipPaths.count(); }

    /**
     * Called once the client knows the ID of the opList that the clip FPs will operate in. This
     * method finishes any outstanding work that was waiting for the opList ID, then detaches and
     * returns this class's list of FPs that complete the clip.
     *
     * NOTE: this must be called AFTER producing the clip mask (if any) because draw calls on
     * the render target context, surface allocations, and even switching render targets (pre MDB)
     * may cause flushes or otherwise change which opList the actual draw is going into.
     */
    std::unique_ptr<GrFragmentProcessor> finishAndDetachAnalyticFPs(GrProxyProvider*,
                                                                    uint32_t opListID,
                                                                    int rtWidth, int rtHeight);

private:
    // Traverses the clip stack, restricted to queryBounds, populating the fields below.
    void walkStack(const SkClipStack&, const SkRect& queryBounds);

    // Outcome of attempting to fold a single element into the reduced clip state.
    enum class ClipResult {
        kNotClipped,
        kClipped,
        kMadeEmpty
    };

    // Intersects the clip with the element's interior, regardless of inverse fill type.
    // NOTE: do not call for elements followed by ops that can grow the clip.
    ClipResult clipInsideElement(const Element*);

    // Intersects the clip with the element's exterior, regardless of inverse fill type.
    // NOTE: do not call for elements followed by ops that can grow the clip.
    ClipResult clipOutsideElement(const Element*);

    // Records a window rectangle (a region the caller must clip out via the GPU extension)
    // derived from the element's interior.
    void addWindowRectangle(const SkRect& elementInteriorRect, bool elementIsAA);

    // Whether an analytic FP should clip to the shape's exterior (kYes) or interior (kNo).
    enum class Invert : bool {
        kNo = false,
        kYes = true
    };

    static GrClipEdgeType GetClipEdgeType(Invert, GrAA);
    // Overloads that try to implement one clip shape as an analytic fragment processor;
    // the ClipResult reports whether the shape was absorbed.
    ClipResult addAnalyticFP(const SkRect& deviceSpaceRect, Invert, GrAA);
    ClipResult addAnalyticFP(const SkRRect& deviceSpaceRRect, Invert, GrAA);
    ClipResult addAnalyticFP(const SkPath& deviceSpacePath, Invert, GrAA);

    // Collapses the reduced clip to the "everything clipped out" state.
    // NOTE(review): inferred from the name — confirm against the .cpp implementation.
    void makeEmpty();

    // Immutable inputs captured at construction.
    const GrShaderCaps* fCaps;
    const int fMaxWindowRectangles;
    const int fMaxAnalyticFPs;
    GrCoverageCountingPathRenderer* const fCCPR;

    // Results of the reduction, produced by walkStack() and friends.
    InitialState fInitialState;
    SkIRect fScissor;             // Valid only while fHasScissor is true (asserted in scissor()).
    bool fHasScissor;
    SkRect fAAClipRect;
    uint32_t fAAClipRectGenID;    // GenID the mask will have if includes the AA clip rect.
    GrWindowRectangles fWindowRects;
    ElementList fMaskElements;
    uint32_t fMaskGenID;          // Only meaningful when fMaskElements is nonempty.
    bool fMaskRequiresAA;         // Only meaningful when fMaskElements is nonempty.
    SkSTArray<4, std::unique_ptr<GrFragmentProcessor>> fAnalyticFPs;
    SkSTArray<4, SkPath> fCCPRClipPaths;  // Will convert to FPs once we have an opList ID for CCPR.
};

#endif