/*
 * Copyright 2010 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */


#include "GrGpu.h"

#include "GrBufferAllocPool.h"
#include "GrContext.h"
#include "GrDrawTargetCaps.h"
#include "GrIndexBuffer.h"
#include "GrStencilBuffer.h"
#include "GrVertexBuffer.h"

// probably makes no sense for this to be less than a page
static const size_t VERTEX_POOL_VB_SIZE = 1 << 18;
static const int VERTEX_POOL_VB_COUNT = 4;
static const size_t INDEX_POOL_IB_SIZE = 1 << 16;
static const int INDEX_POOL_IB_COUNT = 4;

////////////////////////////////////////////////////////////////////////////////

#define DEBUG_INVAL_BUFFER 0xdeadcafe
#define DEBUG_INVAL_START_IDX -1

GrGpu::GrGpu(GrContext* context)
    : GrDrawTarget(context)
    , fResetTimestamp(kExpiredTimestamp+1)
    , fResetBits(kAll_GrBackendState)
    , fVertexPool(NULL)
    , fIndexPool(NULL)
    , fVertexPoolUseCnt(0)
    , fIndexPoolUseCnt(0)
    , fQuadIndexBuffer(NULL) {

    fClipMaskManager.setGpu(this);

    fGeomPoolStateStack.push_back();
#ifdef SK_DEBUG
    GeometryPoolState& poolState = fGeomPoolStateStack.back();
    poolState.fPoolVertexBuffer = (GrVertexBuffer*)DEBUG_INVAL_BUFFER;
    poolState.fPoolStartVertex = DEBUG_INVAL_START_IDX;
    poolState.fPoolIndexBuffer = (GrIndexBuffer*)DEBUG_INVAL_BUFFER;
    poolState.fPoolStartIndex = DEBUG_INVAL_START_IDX;
#endif
}

GrGpu::~GrGpu() {
    SkSafeSetNull(fQuadIndexBuffer);
    delete fVertexPool;
    fVertexPool = NULL;
    delete fIndexPool;
    fIndexPool = NULL;
}

void GrGpu::contextAbandoned() {}

////////////////////////////////////////////////////////////////////////////////

GrTexture* GrGpu::createTexture(const GrTextureDesc& desc,
                                const void* srcData, size_t rowBytes) {
    if (!this->caps()->isConfigTexturable(desc.fConfig)) {
        return NULL;
    }

    if ((desc.fFlags & kRenderTarget_GrTextureFlagBit) &&
        !this->caps()->isConfigRenderable(desc.fConfig, desc.fSampleCnt > 0)) {
        return NULL;
    }

    GrTexture *tex = NULL;
    if (GrPixelConfigIsCompressed(desc.fConfig)) {
        // We shouldn't be rendering into this
        SkASSERT((desc.fFlags & kRenderTarget_GrTextureFlagBit) == 0);

        if (!this->caps()->npotTextureTileSupport() &&
            (!SkIsPow2(desc.fWidth) || !SkIsPow2(desc.fHeight))) {
            return NULL;
        }

        this->handleDirtyContext();
        tex = this->onCreateCompressedTexture(desc, srcData);
    } else {
        this->handleDirtyContext();
        tex = this->onCreateTexture(desc, srcData, rowBytes);
        if (tex &&
            (kRenderTarget_GrTextureFlagBit & desc.fFlags) &&
            !(kNoStencil_GrTextureFlagBit & desc.fFlags)) {
            SkASSERT(tex->asRenderTarget());
            // TODO: defer this and attach dynamically
            if (!this->attachStencilBufferToRenderTarget(tex->asRenderTarget())) {
                tex->unref();
                return NULL;
            }
        }
    }
    return tex;
}

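// Attaches a stencil buffer to the render target: first try to reuse a cached
// stencil buffer of matching dimensions and sample count from the context, and
// only create (and then clear) a new one when no match is found.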
bool GrGpu::attachStencilBufferToRenderTarget(GrRenderTarget* rt) {
    SkASSERT(NULL == rt->getStencilBuffer());
    GrStencilBuffer* sb =
        this->getContext()->findStencilBuffer(rt->width(),
                                              rt->height(),
                                              rt->numSamples());
    if (sb) {
        rt->setStencilBuffer(sb);
        bool attached = this->attachStencilBufferToRenderTarget(sb, rt);
        if (!attached) {
            rt->setStencilBuffer(NULL);
        }
        return attached;
    }
    if (this->createStencilBufferForRenderTarget(rt,
                                                 rt->width(), rt->height())) {
        // Right now we're clearing the stencil buffer here after it is
        // attached to an RT for the first time. When we start matching
        // stencil buffers with smaller color targets this will no longer
        // be correct because it won't be guaranteed to clear the entire
        // sb.
        // We used to clear down in the GL subclass using a special purpose
        // FBO. But iOS doesn't allow a stencil-only FBO. It reports unsupported
        // FBO status.
        this->clearStencil(rt);
        return true;
    } else {
        return false;
    }
}

GrTexture* GrGpu::wrapBackendTexture(const GrBackendTextureDesc& desc) {
    this->handleDirtyContext();
    GrTexture* tex = this->onWrapBackendTexture(desc);
    if (NULL == tex) {
        return NULL;
    }
    // TODO: defer this and attach dynamically
    GrRenderTarget* tgt = tex->asRenderTarget();
    if (tgt &&
        !this->attachStencilBufferToRenderTarget(tgt)) {
        tex->unref();
        return NULL;
    } else {
        return tex;
    }
}

GrRenderTarget* GrGpu::wrapBackendRenderTarget(const GrBackendRenderTargetDesc& desc) {
    this->handleDirtyContext();
    return this->onWrapBackendRenderTarget(desc);
}

GrVertexBuffer* GrGpu::createVertexBuffer(size_t size, bool dynamic) {
    this->handleDirtyContext();
    return this->onCreateVertexBuffer(size, dynamic);
}

GrIndexBuffer* GrGpu::createIndexBuffer(size_t size, bool dynamic) {
    this->handleDirtyContext();
    return this->onCreateIndexBuffer(size, dynamic);
}

void GrGpu::clear(const SkIRect* rect,
                  GrColor color,
                  bool canIgnoreRect,
                  GrRenderTarget* renderTarget) {
    if (NULL == renderTarget) {
        renderTarget = this->getDrawState().getRenderTarget();
    }
    if (NULL == renderTarget) {
        SkASSERT(0);
        return;
    }
    this->handleDirtyContext();
    this->onClear(renderTarget, rect, color, canIgnoreRect);
}

bool GrGpu::readPixels(GrRenderTarget* target,
                       int left, int top, int width, int height,
                       GrPixelConfig config, void* buffer,
                       size_t rowBytes) {
    this->handleDirtyContext();
    return this->onReadPixels(target, left, top, width, height,
                              config, buffer, rowBytes);
}

bool GrGpu::writeTexturePixels(GrTexture* texture,
                               int left, int top, int width, int height,
                               GrPixelConfig config, const void* buffer,
                               size_t rowBytes) {
    this->handleDirtyContext();
    return this->onWriteTexturePixels(texture, left, top, width, height,
                                      config, buffer, rowBytes);
}

void GrGpu::resolveRenderTarget(GrRenderTarget* target) {
    SkASSERT(target);
    this->handleDirtyContext();
    this->onResolveRenderTarget(target);
}

static const GrStencilSettings& winding_path_stencil_settings() {
    GR_STATIC_CONST_SAME_STENCIL_STRUCT(gSettings,
        kIncClamp_StencilOp,
        kIncClamp_StencilOp,
        kAlwaysIfInClip_StencilFunc,
        0xFFFF, 0xFFFF, 0xFFFF);
    return *GR_CONST_STENCIL_SETTINGS_PTR_FROM_STRUCT_PTR(&gSettings);
}

static const GrStencilSettings& even_odd_path_stencil_settings() {
    GR_STATIC_CONST_SAME_STENCIL_STRUCT(gSettings,
        kInvert_StencilOp,
        kInvert_StencilOp,
        kAlwaysIfInClip_StencilFunc,
        0xFFFF, 0xFFFF, 0xFFFF);
    return *GR_CONST_STENCIL_SETTINGS_PTR_FROM_STRUCT_PTR(&gSettings);
}

void GrGpu::getPathStencilSettingsForFillType(SkPath::FillType fill,
                                              GrStencilSettings* outStencilSettings) {
    switch (fill) {
        default:
            SkFAIL("Unexpected path fill.");
            /* fallthrough */;
        case SkPath::kWinding_FillType:
        case SkPath::kInverseWinding_FillType:
            *outStencilSettings = winding_path_stencil_settings();
            break;
        case SkPath::kEvenOdd_FillType:
        case SkPath::kInverseEvenOdd_FillType:
            *outStencilSettings = even_odd_path_stencil_settings();
            break;
    }
    fClipMaskManager.adjustPathStencilParams(outStencilSettings);
}


////////////////////////////////////////////////////////////////////////////////

static const int MAX_QUADS = 1 << 12; // max possible: (1 << 14) - 1;

GR_STATIC_ASSERT(4 * MAX_QUADS <= 65535);

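// Writes two triangles per quad, sharing the first and third vertices:
// (0, 1, 2) and (0, 2, 3) for each group of four vertices.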
static inline void fill_indices(uint16_t* indices, int quadCount) {
    for (int i = 0; i < quadCount; ++i) {
        indices[6 * i + 0] = 4 * i + 0;
        indices[6 * i + 1] = 4 * i + 1;
        indices[6 * i + 2] = 4 * i + 2;
        indices[6 * i + 3] = 4 * i + 0;
        indices[6 * i + 4] = 4 * i + 2;
        indices[6 * i + 5] = 4 * i + 3;
    }
}

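// Lazily creates the shared quad index buffer (or recreates it after the
// backend destroyed it). The preferred path maps the buffer and writes the
// indices directly; if mapping fails, fall back to a heap-allocated array
// uploaded with updateData().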
const GrIndexBuffer* GrGpu::getQuadIndexBuffer() const {
    if (NULL == fQuadIndexBuffer || fQuadIndexBuffer->wasDestroyed()) {
        SkSafeUnref(fQuadIndexBuffer);
        static const int SIZE = sizeof(uint16_t) * 6 * MAX_QUADS;
        GrGpu* me = const_cast<GrGpu*>(this);
        fQuadIndexBuffer = me->createIndexBuffer(SIZE, false);
        if (fQuadIndexBuffer) {
            uint16_t* indices = (uint16_t*)fQuadIndexBuffer->map();
            if (indices) {
                fill_indices(indices, MAX_QUADS);
                fQuadIndexBuffer->unmap();
            } else {
                indices = (uint16_t*)sk_malloc_throw(SIZE);
                fill_indices(indices, MAX_QUADS);
                if (!fQuadIndexBuffer->updateData(indices, SIZE)) {
                    fQuadIndexBuffer->unref();
                    fQuadIndexBuffer = NULL;
                    SkFAIL("Can't get indices into buffer!");
                }
                sk_free(indices);
            }
        }
    }

    return fQuadIndexBuffer;
}

////////////////////////////////////////////////////////////////////////////////

bool GrGpu::setupClipAndFlushState(DrawType type, const GrDeviceCoordTexture* dstCopy,
                                   GrDrawState::AutoRestoreEffects* are,
                                   const SkRect* devBounds) {
    if (!fClipMaskManager.setupClipping(this->getClip(), are, devBounds)) {
        return false;
    }

    if (!this->flushGraphicsState(type, dstCopy)) {
        return false;
    }

    return true;
}

////////////////////////////////////////////////////////////////////////////////

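// Before a new geometry source is pushed, unmap the pools so any reserved or
// array data the client wrote is finalized, then push a fresh pool state entry
// for the new source.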
void GrGpu::geometrySourceWillPush() {
    const GeometrySrcState& geoSrc = this->getGeomSrc();
    if (kArray_GeometrySrcType == geoSrc.fVertexSrc ||
        kReserved_GeometrySrcType == geoSrc.fVertexSrc) {
        this->finalizeReservedVertices();
    }
    if (kArray_GeometrySrcType == geoSrc.fIndexSrc ||
        kReserved_GeometrySrcType == geoSrc.fIndexSrc) {
        this->finalizeReservedIndices();
    }
    GeometryPoolState& newState = fGeomPoolStateStack.push_back();
#ifdef SK_DEBUG
    newState.fPoolVertexBuffer = (GrVertexBuffer*)DEBUG_INVAL_BUFFER;
    newState.fPoolStartVertex = DEBUG_INVAL_START_IDX;
    newState.fPoolIndexBuffer = (GrIndexBuffer*)DEBUG_INVAL_BUFFER;
    newState.fPoolStartIndex = DEBUG_INVAL_START_IDX;
#else
    (void) newState; // silence compiler warning
#endif
}

void GrGpu::geometrySourceWillPop(const GeometrySrcState& restoredState) {
    // if popping last entry then pops are unbalanced with pushes
    SkASSERT(fGeomPoolStateStack.count() > 1);
    fGeomPoolStateStack.pop_back();
}

void GrGpu::onDraw(const DrawInfo& info) {
    this->handleDirtyContext();
    GrDrawState::AutoRestoreEffects are;
    if (!this->setupClipAndFlushState(PrimTypeToDrawType(info.primitiveType()),
                                      info.getDstCopy(), &are, info.getDevBounds())) {
        return;
    }
    this->onGpuDraw(info);
}

void GrGpu::onStencilPath(const GrPath* path, SkPath::FillType fill) {
    this->handleDirtyContext();

    GrDrawState::AutoRestoreEffects are;
    if (!this->setupClipAndFlushState(kStencilPath_DrawType, NULL, &are, NULL)) {
        return;
    }

    this->pathRendering()->stencilPath(path, fill);
}


void GrGpu::onDrawPath(const GrPath* path, SkPath::FillType fill,
                       const GrDeviceCoordTexture* dstCopy) {
    this->handleDirtyContext();

    drawState()->setDefaultVertexAttribs();

    GrDrawState::AutoRestoreEffects are;
    if (!this->setupClipAndFlushState(kDrawPath_DrawType, dstCopy, &are, NULL)) {
        return;
    }

    this->pathRendering()->drawPath(path, fill);
}

void GrGpu::onDrawPaths(const GrPathRange* pathRange,
                        const uint32_t indices[], int count,
                        const float transforms[], PathTransformType transformsType,
                        SkPath::FillType fill, const GrDeviceCoordTexture* dstCopy) {
    this->handleDirtyContext();

    drawState()->setDefaultVertexAttribs();

    GrDrawState::AutoRestoreEffects are;
    if (!this->setupClipAndFlushState(kDrawPaths_DrawType, dstCopy, &are, NULL)) {
        return;
    }

    pathRange->willDrawPaths(indices, count);
    this->pathRendering()->drawPaths(pathRange, indices, count, transforms, transformsType, fill);
}

void GrGpu::finalizeReservedVertices() {
    SkASSERT(fVertexPool);
    fVertexPool->unmap();
}

void GrGpu::finalizeReservedIndices() {
    SkASSERT(fIndexPool);
    fIndexPool->unmap();
}

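// Creates the vertex pool the first time it is needed; on later calls, if no
// client data is outstanding in the pool, resets it so its buffers can be
// reused for the next batch of reserved or array geometry.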
void GrGpu::prepareVertexPool() {
    if (NULL == fVertexPool) {
        SkASSERT(0 == fVertexPoolUseCnt);
        fVertexPool = SkNEW_ARGS(GrVertexBufferAllocPool, (this, true,
                                                           VERTEX_POOL_VB_SIZE,
                                                           VERTEX_POOL_VB_COUNT));
        fVertexPool->releaseGpuRef();
    } else if (!fVertexPoolUseCnt) {
        // the client doesn't have valid data in the pool
        fVertexPool->reset();
    }
}

void GrGpu::prepareIndexPool() {
    if (NULL == fIndexPool) {
        SkASSERT(0 == fIndexPoolUseCnt);
        fIndexPool = SkNEW_ARGS(GrIndexBufferAllocPool, (this, true,
                                                         INDEX_POOL_IB_SIZE,
                                                         INDEX_POOL_IB_COUNT));
        fIndexPool->releaseGpuRef();
    } else if (!fIndexPoolUseCnt) {
        // the client doesn't have valid data in the pool
        fIndexPool->reset();
    }
}

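// Reserves space for the requested vertices in the vertex pool and records the
// backing buffer and starting vertex in the current pool state so the eventual
// draw can be issued from the correct offset.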
bool GrGpu::onReserveVertexSpace(size_t vertexSize,
                                 int vertexCount,
                                 void** vertices) {
    GeometryPoolState& geomPoolState = fGeomPoolStateStack.back();

    SkASSERT(vertexCount > 0);
    SkASSERT(vertices);

    this->prepareVertexPool();

    *vertices = fVertexPool->makeSpace(vertexSize,
                                       vertexCount,
                                       &geomPoolState.fPoolVertexBuffer,
                                       &geomPoolState.fPoolStartVertex);
    if (NULL == *vertices) {
        return false;
    }
    ++fVertexPoolUseCnt;
    return true;
}

bool GrGpu::onReserveIndexSpace(int indexCount, void** indices) {
    GeometryPoolState& geomPoolState = fGeomPoolStateStack.back();

    SkASSERT(indexCount > 0);
    SkASSERT(indices);

    this->prepareIndexPool();

    *indices = fIndexPool->makeSpace(indexCount,
                                     &geomPoolState.fPoolIndexBuffer,
                                     &geomPoolState.fPoolStartIndex);
    if (NULL == *indices) {
        return false;
    }
    ++fIndexPoolUseCnt;
    return true;
}

void GrGpu::releaseReservedVertexSpace() {
    const GeometrySrcState& geoSrc = this->getGeomSrc();
    SkASSERT(kReserved_GeometrySrcType == geoSrc.fVertexSrc);
    size_t bytes = geoSrc.fVertexCount * geoSrc.fVertexSize;
    fVertexPool->putBack(bytes);
    --fVertexPoolUseCnt;
}

void GrGpu::releaseReservedIndexSpace() {
    const GeometrySrcState& geoSrc = this->getGeomSrc();
    SkASSERT(kReserved_GeometrySrcType == geoSrc.fIndexSrc);
    size_t bytes = geoSrc.fIndexCount * sizeof(uint16_t);
    fIndexPool->putBack(bytes);
    --fIndexPoolUseCnt;
}

void GrGpu::onSetVertexSourceToArray(const void* vertexArray, int vertexCount) {
    this->prepareVertexPool();
    GeometryPoolState& geomPoolState = fGeomPoolStateStack.back();
#ifdef SK_DEBUG
    bool success =
#endif
    fVertexPool->appendVertices(this->getVertexSize(),
                                vertexCount,
                                vertexArray,
                                &geomPoolState.fPoolVertexBuffer,
                                &geomPoolState.fPoolStartVertex);
    ++fVertexPoolUseCnt;
    GR_DEBUGASSERT(success);
}

void GrGpu::onSetIndexSourceToArray(const void* indexArray, int indexCount) {
    this->prepareIndexPool();
    GeometryPoolState& geomPoolState = fGeomPoolStateStack.back();
#ifdef SK_DEBUG
    bool success =
#endif
    fIndexPool->appendIndices(indexCount,
                              indexArray,
                              &geomPoolState.fPoolIndexBuffer,
                              &geomPoolState.fPoolStartIndex);
    ++fIndexPoolUseCnt;
    GR_DEBUGASSERT(success);
}

void GrGpu::releaseVertexArray() {
    // if vertex source was array, we stowed data in the pool
    const GeometrySrcState& geoSrc = this->getGeomSrc();
    SkASSERT(kArray_GeometrySrcType == geoSrc.fVertexSrc);
    size_t bytes = geoSrc.fVertexCount * geoSrc.fVertexSize;
    fVertexPool->putBack(bytes);
    --fVertexPoolUseCnt;
}

void GrGpu::releaseIndexArray() {
    // if index source was array, we stowed data in the pool
    const GeometrySrcState& geoSrc = this->getGeomSrc();
    SkASSERT(kArray_GeometrySrcType == geoSrc.fIndexSrc);
    size_t bytes = geoSrc.fIndexCount * sizeof(uint16_t);
    fIndexPool->putBack(bytes);
    --fIndexPoolUseCnt;
}