• Home
  • History
  • Annotate
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */
7 
8 #ifndef GrDrawingManager_DEFINED
9 #define GrDrawingManager_DEFINED
10 
11 #include "GrBufferAllocPool.h"
12 #include "GrDeferredUpload.h"
13 #include "GrPathRenderer.h"
14 #include "GrPathRendererChain.h"
15 #include "GrResourceCache.h"
16 #include "SkSurface.h"
17 #include "SkTArray.h"
18 #include "text/GrTextContext.h"
19 
20 class GrCoverageCountingPathRenderer;
21 class GrOnFlushCallbackObject;
22 class GrOpFlushState;
23 class GrRecordingContext;
24 class GrRenderTargetContext;
25 class GrRenderTargetProxy;
26 class GrRenderTargetOpList;
27 class GrSoftwarePathRenderer;
28 class GrTextureContext;
29 class GrTextureOpList;
30 class SkDeferredDisplayList;
31 
// The GrDrawingManager allocates a new GrRenderTargetContext for each GrRenderTarget
// but all of them still land in the same GrOpList!
//
// In the future this class will allocate a new GrRenderTargetContext for
// each GrRenderTarget/GrOpList and manage the DAG.
class GrDrawingManager {
public:
    ~GrDrawingManager();

    // Releases GPU-side resources held by the manager (NOTE(review): exact set of
    // resources freed is defined in the .cpp — confirm before relying on it).
    void freeGpuResources();

    // Wraps the given proxy in a new render-target context. When managedOpList is true
    // the context's opList participates in this manager's normal sort/flush handling
    // (see newRTOpList below); when false it is owned by an onFlush callback.
    sk_sp<GrRenderTargetContext> makeRenderTargetContext(sk_sp<GrSurfaceProxy>,
                                                         sk_sp<SkColorSpace>,
                                                         const SkSurfaceProps*,
                                                         bool managedOpList = true);
    // Wraps the given proxy in a new texture context.
    sk_sp<GrTextureContext> makeTextureContext(sk_sp<GrSurfaceProxy>, sk_sp<SkColorSpace>);

    // The caller automatically gets a ref on the returned opList. It must
    // be balanced by an unref call.
    // A managed opList is controlled by the drawing manager (i.e., sorted & flushed with the
    // other). An unmanaged one is created and used by the onFlushCallback.
    sk_sp<GrRenderTargetOpList> newRTOpList(GrRenderTargetProxy* rtp, bool managedOpList);
    sk_sp<GrTextureOpList> newTextureOpList(GrTextureProxy* textureProxy);

    // Returns the recording context this manager was created with (non-owning).
    GrRecordingContext* getContext() { return fContext; }

    // Returns the text context (backed by fTextContext; presumably created lazily —
    // TODO(review): confirm against the .cpp).
    GrTextContext* getTextContext();

    // Selects a path renderer capable of drawing the path described by 'args'.
    // 'allowSW' permits falling back to the software path renderer; 'stencilSupport',
    // if non-null, receives the chosen renderer's stencil capabilities.
    GrPathRenderer* getPathRenderer(const GrPathRenderer::CanDrawPathArgs& args,
                                    bool allowSW,
                                    GrPathRendererChain::DrawType drawType,
                                    GrPathRenderer::StencilSupport* stencilSupport = nullptr);

    // Returns the CPU-rasterization fallback path renderer.
    GrPathRenderer* getSoftwarePathRenderer();

    // Returns a direct pointer to the coverage counting path renderer, or null if it is not
    // supported and turned on.
    GrCoverageCountingPathRenderer* getCoverageCountingPathRenderer();

    // Flushes pending work if some internal condition requires it (condition lives in
    // the .cpp — see also fFlushing below).
    void flushIfNecessary();

    static bool ProgramUnitTest(GrContext* context, int maxStages, int maxLevels);

    // Flushes and prepares 'proxy' for use outside of Skia (e.g. presentation),
    // optionally signaling the provided semaphores and invoking 'finishedProc'
    // with 'finishedContext' when the GPU work completes.
    GrSemaphoresSubmitted prepareSurfaceForExternalIO(GrSurfaceProxy*,
                                                      SkSurface::BackendSurfaceAccess access,
                                                      GrFlushFlags flags,
                                                      int numSemaphores,
                                                      GrBackendSemaphore backendSemaphores[],
                                                      GrGpuFinishedProc finishedProc,
                                                      GrGpuFinishedContext finishedContext);

    // Registers an object to be notified around flush time (stored in fOnFlushCBObjects;
    // pointer is not owned).
    void addOnFlushCallbackObject(GrOnFlushCallbackObject*);

#if GR_TEST_UTILS
    void testingOnly_removeOnFlushCallbackObject(GrOnFlushCallbackObject*);
#endif

    // Transfers the recorded opLists into/out of a deferred display list; used to
    // support SkDeferredDisplayList record-then-replay.
    void moveOpListsToDDL(SkDeferredDisplayList* ddl);
    void copyOpListsFromDDL(const SkDeferredDisplayList*, GrRenderTargetProxy* newDest);

private:
    // This class encapsulates maintenance and manipulation of the drawing manager's DAG of opLists.
    class OpListDAG {
    public:
        OpListDAG(bool explicitlyAllocating, bool sortOpLists);
        ~OpListDAG();

        // Currently, when explicitly allocating resources, this call will topologically sort the
        // opLists.
        // MDB TODO: remove once incremental opList sorting is enabled
        void prepForFlush();

        // Closes every opList in the DAG (no removal).
        void closeAll(const GrCaps* caps);

        // A yucky combination of closeAll and reset
        void cleanup(const GrCaps* caps);

        // Appends the unique IDs of the opLists in the DAG to 'idArray'.
        void gatherIDs(SkSTArray<8, uint32_t, true>* idArray) const;

        void reset();

        // These calls forcibly remove an opList from the DAG. They are problematic because they
        // just remove the opList but don't clean up any referring pointers (i.e., dependency
        // pointers in the DAG). They work right now because they are only called at flush time,
        // after the topological sort is complete (so the dangling pointers aren't used).
        void removeOpList(int index);
        void removeOpLists(int startIndex, int stopIndex);

        bool empty() const { return fOpLists.empty(); }
        int numOpLists() const { return fOpLists.count(); }

        GrOpList* opList(int index) { return fOpLists[index].get(); }
        const GrOpList* opList(int index) const { return fOpLists[index].get(); }

        GrOpList* back() { return fOpLists.back().get(); }
        const GrOpList* back() const { return fOpLists.back().get(); }

        // Add one opList, or every opList in the given array, to the DAG (shared ownership).
        void add(sk_sp<GrOpList>);
        void add(const SkTArray<sk_sp<GrOpList>>&);

        // Exchanges the DAG's opList array with '*opLists'.
        void swap(SkTArray<sk_sp<GrOpList>>* opLists);

        bool sortingOpLists() const { return fSortOpLists; }

    private:
        SkTArray<sk_sp<GrOpList>> fOpLists;     // the DAG's nodes, in insertion (or sorted) order
        bool                      fSortOpLists; // whether prepForFlush should topologically sort
    };

    // Private: GrRecordingContext constructs the drawing manager (see friend list below).
    GrDrawingManager(GrRecordingContext*, const GrPathRendererChain::Options&,
                     const GrTextContext::Options&,
                     bool explicitlyAllocating,
                     bool sortOpLists,
                     GrContextOptions::Enable reduceOpListSplitting);

    bool wasAbandoned() const;

    void cleanup();

    // return true if any opLists were actually executed; false otherwise
    bool executeOpLists(int startIndex, int stopIndex, GrOpFlushState*, int* numOpListsExecuted);

    // Core flush: executes the recorded opLists targeting 'proxy'. Semaphore/callback
    // parameters mirror prepareSurfaceForExternalIO above.
    GrSemaphoresSubmitted flush(GrSurfaceProxy* proxy,
                                SkSurface::BackendSurfaceAccess access,
                                GrFlushFlags flags,
                                int numSemaphores,
                                GrBackendSemaphore backendSemaphores[],
                                GrGpuFinishedProc finishedProc,
                                GrGpuFinishedContext finishedContext);

    SkDEBUGCODE(void validate() const);

    friend class GrContext; // access to: flush & cleanup
    friend class GrContextPriv; // access to: flush
    friend class GrOnFlushResourceProvider; // this is just a shallow wrapper around this class
    friend class GrRecordingContext;  // access to: ctor
    friend class SkImage; // for access to: flush

    static const int kNumPixelGeometries = 5; // The different pixel geometries
    static const int kNumDFTOptions = 2;      // DFT or no DFT

    GrRecordingContext*               fContext;                     // not owned
    GrPathRendererChain::Options      fOptionsForPathRendererChain;
    GrTextContext::Options            fOptionsForTextContext;
    // This cache is used by both the vertex and index pools. It reuses memory across multiple
    // flushes.
    sk_sp<GrBufferAllocPool::CpuBufferCache> fCpuBufferCache;

    OpListDAG                         fDAG;
    GrOpList*                         fActiveOpList = nullptr;
    // These are the IDs of the opLists currently being flushed (in internalFlush)
    SkSTArray<8, uint32_t, true>      fFlushingOpListIDs;
    // These are the new opLists generated by the onFlush CBs
    SkSTArray<8, sk_sp<GrOpList>>     fOnFlushCBOpLists;

    std::unique_ptr<GrTextContext>    fTextContext;

    std::unique_ptr<GrPathRendererChain> fPathRendererChain;
    sk_sp<GrSoftwarePathRenderer>     fSoftwarePathRenderer;

    GrTokenTracker                    fTokenTracker;
    bool                              fFlushing;              // reentrancy guard for flush — TODO confirm in .cpp
    bool                              fReduceOpListSplitting;

    SkTArray<GrOnFlushCallbackObject*> fOnFlushCBObjects;     // elements not owned
};
198 
199 #endif
200