/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrDirectContextPriv_DEFINED
#define GrDirectContextPriv_DEFINED

#include "include/core/SkSpan.h"
#include "include/core/SkSurface.h"
#include "include/gpu/GrDirectContext.h"

class GrAtlasManager;
class GrBackendFormat;
class GrBackendRenderTarget;
class GrMemoryPool;
class GrOnFlushCallbackObject;
class GrRenderTargetProxy;
class GrSemaphore;
class GrSurfaceProxy;

class SkDeferredDisplayList;
class SkTaskGroup;

/** Class that adds methods to GrDirectContext that are only intended for use internal to Skia.
    This class is purely a privileged window into GrDirectContext. It should never have additional
    data members or virtual methods. */
class GrDirectContextPriv {
public:

    // from GrContext_Base
    uint32_t contextID() const { return fContext->contextID(); }

    bool matches(GrContext_Base* candidate) const { return fContext->matches(candidate); }

    const GrContextOptions& options() const { return fContext->options(); }

    const GrCaps* caps() const { return fContext->caps(); }
    sk_sp<const GrCaps> refCaps() const;

    GrImageContext* asImageContext() { return fContext->asImageContext(); }
    GrRecordingContext* asRecordingContext() { return fContext->asRecordingContext(); }

    // from GrRecordingContext
    GrProxyProvider* proxyProvider() { return fContext->proxyProvider(); }
    const GrProxyProvider* proxyProvider() const { return fContext->proxyProvider(); }

    /** This is only useful for debug purposes */
    SkDEBUGCODE(GrSingleOwner* singleOwner() const { return fContext->singleOwner(); } )

    // from GrRecordingContext
    GrDrawingManager* drawingManager() { return fContext->drawingManager(); }

    SkArenaAlloc* recordTimeAllocator() { return fContext->arenas().recordTimeAllocator(); }
    GrRecordingContext::Arenas arenas() { return fContext->arenas(); }

    GrStrikeCache* getGrStrikeCache() { return fContext->fStrikeCache.get(); }
    GrTextBlobCache* getTextBlobCache() { return fContext->getTextBlobCache(); }

    GrThreadSafeCache* threadSafeCache() { return fContext->threadSafeCache(); }

    /**
     * Registers an object for flush-related callbacks. (See GrOnFlushCallbackObject.)
     *
     * NOTE: the drawing manager tracks this object as a raw pointer; it is up to the caller to
     * ensure its lifetime is tied to that of the context.
     */
    void addOnFlushCallbackObject(GrOnFlushCallbackObject*);
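    //
    // Illustrative sketch (hypothetical names): 'dContext' is assumed to be a valid
    // GrDirectContext* and 'myOnFlushObject' a GrOnFlushCallbackObject whose owner keeps
    // it alive at least as long as the context, per the NOTE above:
    //
    //     dContext->priv().addOnFlushCallbackObject(&myOnFlushObject);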

    GrAuditTrail* auditTrail() { return fContext->auditTrail(); }

    /**
     * Finalizes all pending reads and writes to the surfaces and also performs an MSAA resolve
     * if necessary. The GrSurfaceProxy array is treated as a hint. If it is supplied, the context
     * will guarantee that the draws required for those proxies are flushed, but it could do more.
     * If no array is provided then all current work will be flushed.
     *
     * It is not necessary to call this before reading the render target via Skia/GrContext.
     * GrContext will detect when it must perform a resolve before reading pixels back from the
     * surface or using it as a texture.
     */
    GrSemaphoresSubmitted flushSurfaces(
                SkSpan<GrSurfaceProxy*>,
                SkSurface::BackendSurfaceAccess = SkSurface::BackendSurfaceAccess::kNoAccess,
                const GrFlushInfo& = {},
                const GrBackendSurfaceMutableState* newState = nullptr);

    /** Version of above that flushes for a single proxy. Null is allowed. */
    GrSemaphoresSubmitted flushSurface(
                GrSurfaceProxy* proxy,
                SkSurface::BackendSurfaceAccess access = SkSurface::BackendSurfaceAccess::kNoAccess,
                const GrFlushInfo& info = {},
                const GrBackendSurfaceMutableState* newState = nullptr) {
        size_t size = proxy ? 1 : 0;
        return this->flushSurfaces({&proxy, size}, access, info, newState);
    }
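
    // Illustrative sketch (hypothetical names): flushing two proxies with the default access,
    // flush info, and state. 'dContext' is assumed to be a valid GrDirectContext* and
    // 'proxyA'/'proxyB' live GrSurfaceProxy pointers; the span is built the same way as in
    // flushSurface() above.
    //
    //     GrSurfaceProxy* proxies[] = { proxyA, proxyB };
    //     GrSemaphoresSubmitted submitted = dContext->priv().flushSurfaces({proxies, 2});
    //     if (submitted == GrSemaphoresSubmitted::kNo) {
    //         // Any semaphores requested via the GrFlushInfo were not submitted.
    //     }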

    /**
     * Returns true if createPMToUPMEffect and createUPMToPMEffect will succeed. In other words,
     * did we find a pair of round-trip preserving conversion effects?
     */
    bool validPMUPMConversionExists();

    /**
     * These functions create premul <-> unpremul effects, using the specialized round-trip effects
     * from GrConfigConversionEffect.
     */
    std::unique_ptr<GrFragmentProcessor> createPMToUPMEffect(std::unique_ptr<GrFragmentProcessor>);
    std::unique_ptr<GrFragmentProcessor> createUPMToPMEffect(std::unique_ptr<GrFragmentProcessor>);
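
    // Illustrative sketch (hypothetical names): wrapping a child processor so its premultiplied
    // output is converted to unpremultiplied, guarded by the validity check above. 'dContext' is
    // assumed to be a valid GrDirectContext* and 'childFP' a caller-owned
    // std::unique_ptr<GrFragmentProcessor>.
    //
    //     if (dContext->priv().validPMUPMConversionExists()) {
    //         std::unique_ptr<GrFragmentProcessor> upmFP =
    //                 dContext->priv().createPMToUPMEffect(std::move(childFP));
    //     }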

    SkTaskGroup* getTaskGroup() { return fContext->fTaskGroup.get(); }

    GrResourceProvider* resourceProvider() { return fContext->fResourceProvider.get(); }
    const GrResourceProvider* resourceProvider() const { return fContext->fResourceProvider.get(); }

    GrResourceCache* getResourceCache() { return fContext->fResourceCache.get(); }

    GrGpu* getGpu() { return fContext->fGpu.get(); }
    const GrGpu* getGpu() const { return fContext->fGpu.get(); }

    // This accessor should only ever be called by the GrOpFlushState.
    GrAtlasManager* getAtlasManager() {
        return fContext->onGetAtlasManager();
    }

    // This accessor should only ever be called by the GrOpFlushState.
    GrSmallPathAtlasMgr* getSmallPathAtlasMgr() {
        return fContext->onGetSmallPathAtlasMgr();
    }

    void createDDLTask(sk_sp<const SkDeferredDisplayList>,
                       sk_sp<GrRenderTargetProxy> newDest,
                       SkIPoint offset);

    bool compile(const GrProgramDesc&, const GrProgramInfo&);

    GrContextOptions::PersistentCache* getPersistentCache() { return fContext->fPersistentCache; }
    GrContextOptions::ShaderErrorHandler* getShaderErrorHandler() const {
        return fContext->fShaderErrorHandler;
    }

    GrClientMappedBufferManager* clientMappedBufferManager() {
        return fContext->fMappedBufferManager.get();
    }

#if GR_TEST_UTILS
    /** Reset GPU stats */
    void resetGpuStats() const;

    /** Prints cache stats to the string if GR_CACHE_STATS == 1. */
    void dumpCacheStats(SkString*) const;
    void dumpCacheStatsKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
    void printCacheStats() const;

    /** Prints GPU stats to the string if GR_GPU_STATS == 1. */
    void dumpGpuStats(SkString*) const;
    void dumpGpuStatsKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
    void printGpuStats() const;

    /** These are only active if GR_GPU_STATS == 1. */
    void resetContextStats() const;
    void dumpContextStats(SkString*) const;
    void dumpContextStatsKeyValuePairs(SkTArray<SkString>* keys, SkTArray<double>* values) const;
    void printContextStats() const;

    /** Get pointer to atlas texture for given mask format. Note that this wraps an
        actively mutating texture in an SkImage. This could yield unexpected results
        if it gets cached or used more generally. */
    sk_sp<SkImage> testingOnly_getFontAtlasImage(GrMaskFormat format, unsigned int index = 0);
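
    // Illustrative sketch (hypothetical name 'dContext'): grabbing the first A8 atlas page for
    // inspection in a test; per the note above, treat the returned SkImage as a transient view
    // of a mutating texture.
    //
    //     sk_sp<SkImage> atlasImage =
    //             dContext->priv().testingOnly_getFontAtlasImage(kA8_GrMaskFormat);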

    void testingOnly_flushAndRemoveOnFlushCallbackObject(GrOnFlushCallbackObject*);
#endif

private:
    explicit GrDirectContextPriv(GrDirectContext* context) : fContext(context) {}
    GrDirectContextPriv(const GrDirectContextPriv&) = delete;
    GrDirectContextPriv& operator=(const GrDirectContextPriv&) = delete;

    // No taking addresses of this type.
    const GrDirectContextPriv* operator&() const;
    GrDirectContextPriv* operator&();

    GrDirectContext* fContext;

    friend class GrDirectContext; // to construct/copy this type.
};

inline GrDirectContextPriv GrDirectContext::priv() { return GrDirectContextPriv(this); }

// NOLINTNEXTLINE(readability-const-return-type)
inline const GrDirectContextPriv GrDirectContext::priv() const {
    return GrDirectContextPriv(const_cast<GrDirectContext*>(this));
}
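
// Illustrative usage (hypothetical name 'dContext'): priv() returns the wrapper by value, so
// internal callers normally chain directly off it rather than storing it:
//
//     const GrCaps* caps = dContext->priv().caps();
//     GrGpu* gpu = dContext->priv().getGpu();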

#endif