/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkTypes.h"

#if defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26
#define GL_GLEXT_PROTOTYPES
#define EGL_EGLEXT_PROTOTYPES

#include "GrAHardwareBufferImageGenerator.h"

#include <android/hardware_buffer.h>

#include "GrBackendSurface.h"
#include "GrContext.h"
#include "GrContextPriv.h"
#include "GrProxyProvider.h"
#include "GrResourceCache.h"
#include "GrResourceProvider.h"
#include "GrResourceProviderPriv.h"
#include "GrTexture.h"
#include "GrTextureProxy.h"
#include "SkMessageBus.h"
#include "gl/GrGLDefines.h"
#include "gl/GrGLTypes.h"

#include <EGL/egl.h>
#include <EGL/eglext.h>
#include <GLES/gl.h>
#include <GLES/glext.h>
#include <string.h>

#ifdef SK_VULKAN
#include "vk/GrVkExtensions.h"
#include "vk/GrVkGpu.h"
#endif

#define PROT_CONTENT_EXT_STR "EGL_EXT_protected_content"
#define EGL_PROTECTED_CONTENT_EXT 0x32C0

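// Returns true if the EGL implementation advertises EGL_EXT_protected_content. A bare strstr()
// would also match longer extension names that merely contain PROT_CONTENT_EXT_STR, so the
// extension string is checked for an exact, space-delimited token instead.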
static bool can_import_protected_content_eglimpl() {
    EGLDisplay dpy = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    const char* exts = eglQueryString(dpy, EGL_EXTENSIONS);
    if (!exts) {
        // eglQueryString returns nullptr on failure; treat that as "extension not present".
        return false;
    }
    size_t cropExtLen = strlen(PROT_CONTENT_EXT_STR);
    size_t extsLen = strlen(exts);
    bool equal = !strcmp(PROT_CONTENT_EXT_STR, exts);
    bool atStart = !strncmp(PROT_CONTENT_EXT_STR " ", exts, cropExtLen+1);
    bool atEnd = (cropExtLen+1) < extsLen
                 && !strcmp(" " PROT_CONTENT_EXT_STR,
                            exts + extsLen - (cropExtLen+1));
    bool inMiddle = strstr(exts, " " PROT_CONTENT_EXT_STR " ");
    return equal || atStart || atEnd || inMiddle;
}

static bool can_import_protected_content(GrContext* context) {
    if (GrBackendApi::kOpenGL == context->backend()) {
        // Only compute whether the extension is present the first time this
        // function is called; the static caches the result.
        static bool hasIt = can_import_protected_content_eglimpl();
        return hasIt;
    }
    return false;
}

std::unique_ptr<SkImageGenerator> GrAHardwareBufferImageGenerator::Make(
        AHardwareBuffer* graphicBuffer, SkAlphaType alphaType, sk_sp<SkColorSpace> colorSpace,
        GrSurfaceOrigin surfaceOrigin) {
    AHardwareBuffer_Desc bufferDesc;
    AHardwareBuffer_describe(graphicBuffer, &bufferDesc);
    SkColorType colorType;
    switch (bufferDesc.format) {
        case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
        case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
            colorType = kRGBA_8888_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
            colorType = kRGBA_F16_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
            colorType = kRGB_565_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
            colorType = kRGB_888x_SkColorType;
            break;
        case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
            colorType = kRGBA_1010102_SkColorType;
            break;
        default:
            // Given that we only use this texture as a source, the colorType will not impact how
            // Skia uses the texture. The only potential effect this is anticipated to have is
            // that, for some format types, if we are not bound as an OES texture we may get
            // invalid results for SKP capture if we read back the texture.
            colorType = kRGBA_8888_SkColorType;
            break;
    }
    SkImageInfo info = SkImageInfo::Make(bufferDesc.width, bufferDesc.height, colorType,
                                         alphaType, std::move(colorSpace));
    bool createProtectedImage = 0 != (bufferDesc.usage & AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT);
    return std::unique_ptr<SkImageGenerator>(new GrAHardwareBufferImageGenerator(
            info, graphicBuffer, alphaType, createProtectedImage,
            bufferDesc.format, surfaceOrigin));
}

GrAHardwareBufferImageGenerator::GrAHardwareBufferImageGenerator(const SkImageInfo& info,
        AHardwareBuffer* hardwareBuffer, SkAlphaType alphaType, bool isProtectedContent,
        uint32_t bufferFormat, GrSurfaceOrigin surfaceOrigin)
    : INHERITED(info)
    , fHardwareBuffer(hardwareBuffer)
    , fBufferFormat(bufferFormat)
    , fIsProtectedContent(isProtectedContent)
    , fSurfaceOrigin(surfaceOrigin) {
    AHardwareBuffer_acquire(fHardwareBuffer);
}

GrAHardwareBufferImageGenerator::~GrAHardwareBufferImageGenerator() {
    AHardwareBuffer_release(fHardwareBuffer);
}

///////////////////////////////////////////////////////////////////////////////////////////////////

#ifdef SK_VULKAN

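// Owns the cleanup of a VkImage/VkDeviceMemory pair imported from an AHardwareBuffer. The raw
// device handle and the vkDestroyImage/vkFreeMemory entry points are captured up front so the
// destructor does not need to reach back through the GrVkGpu.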
class VulkanCleanupHelper {
public:
    VulkanCleanupHelper(GrVkGpu* gpu, VkImage image, VkDeviceMemory memory)
        : fDevice(gpu->device())
        , fImage(image)
        , fMemory(memory)
        , fDestroyImage(gpu->vkInterface()->fFunctions.fDestroyImage)
        , fFreeMemory(gpu->vkInterface()->fFunctions.fFreeMemory) {}
    ~VulkanCleanupHelper() {
        fDestroyImage(fDevice, fImage, nullptr);
        fFreeMemory(fDevice, fMemory, nullptr);
    }
private:
    VkDevice fDevice;
    VkImage fImage;
    VkDeviceMemory fMemory;
    PFN_vkDestroyImage fDestroyImage;
    PFN_vkFreeMemory fFreeMemory;
};

void GrAHardwareBufferImageGenerator::DeleteVkImage(void* context) {
    VulkanCleanupHelper* cleanupHelper = static_cast<VulkanCleanupHelper*>(context);
    delete cleanupHelper;
}

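// Calls through the function table of the GrVkGpu's Vulkan interface; e.g.
// VK_CALL(CreateImage(device, ...)) dispatches to fFunctions.fCreateImage(device, ...).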
#define VK_CALL(X) gpu->vkInterface()->fFunctions.f##X;

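// Imports the AHardwareBuffer into Vulkan and wraps it in a GrBackendTexture:
//   1) query the buffer's properties via vkGetAndroidHardwareBufferPropertiesANDROID,
//   2) create a VkImage chained with VkExternalMemoryImageCreateInfo (plus a
//      VkExternalFormatANDROID when the buffer has no equivalent VkFormat),
//   3) pick a device-local memory type permitted by the buffer,
//   4) import the buffer's memory as a dedicated allocation and bind it to the image.
// On success, *deleteProc/*deleteCtx are set so the caller can destroy the image and free the
// memory once the wrapped texture is released.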
static GrBackendTexture make_vk_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    SkASSERT(context->backend() == GrBackendApi::kVulkan);
    GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());
    SkASSERT(gpu);

    VkPhysicalDevice physicalDevice = gpu->physicalDevice();
    VkDevice device = gpu->device();

    if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
        return GrBackendTexture();
    }

    SkASSERT(backendFormat.getVkFormat());
    VkFormat format = *backendFormat.getVkFormat();

    VkResult err;

    VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
    hwbFormatProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
    hwbFormatProps.pNext = nullptr;

    VkAndroidHardwareBufferPropertiesANDROID hwbProps;
    hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
    hwbProps.pNext = &hwbFormatProps;

    err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer, &hwbProps));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

    VkExternalFormatANDROID externalFormat;
    externalFormat.sType = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID;
    externalFormat.pNext = nullptr;
    externalFormat.externalFormat = 0;  // If this is zero it is as if we aren't using this struct.

    const GrVkYcbcrConversionInfo* ycbcrConversion = backendFormat.getVkYcbcrConversionInfo();
    if (!ycbcrConversion) {
        return GrBackendTexture();
    }

    if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
        // TODO: We should not assume the transfer features here and instead should have a way for
        // Ganesh's tracking of internal images to report whether or not they support transfers.
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_SRC_BIT & hwbFormatProps.formatFeatures) &&
                 SkToBool(VK_FORMAT_FEATURE_TRANSFER_DST_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(!ycbcrConversion->isValid());
    } else {
        SkASSERT(ycbcrConversion->isValid());
        // We have an external-only format.
        SkASSERT(SkToBool(VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT & hwbFormatProps.formatFeatures));
        SkASSERT(format == VK_FORMAT_UNDEFINED);
        SkASSERT(hwbFormatProps.externalFormat == ycbcrConversion->fExternalFormat);
        externalFormat.externalFormat = hwbFormatProps.externalFormat;
    }
    SkASSERT(format == hwbFormatProps.format);

    const VkExternalMemoryImageCreateInfo externalMemoryImageInfo{
            VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,                 // sType
            &externalFormat,                                                     // pNext
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,  // handleTypes
    };
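    // An external-format image (VK_FORMAT_UNDEFINED) may only be sampled, so the transfer usage
    // bits are added only when the buffer maps to a known VkFormat.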
    VkImageUsageFlags usageFlags = VK_IMAGE_USAGE_SAMPLED_BIT;
    if (format != VK_FORMAT_UNDEFINED) {
        usageFlags = usageFlags |
                     VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                     VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    }

    // TODO: Check the supported tilings via vkGetPhysicalDeviceImageFormatProperties2 to see if
    // we have to use linear. Add better linear support throughout Ganesh.
    VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

    const VkImageCreateInfo imageCreateInfo = {
            VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,      // sType
            &externalMemoryImageInfo,                 // pNext
            0,                                        // VkImageCreateFlags
            VK_IMAGE_TYPE_2D,                         // VkImageType
            format,                                   // VkFormat
            { (uint32_t)width, (uint32_t)height, 1 }, // VkExtent3D
            1,                                        // mipLevels
            1,                                        // arrayLayers
            VK_SAMPLE_COUNT_1_BIT,                    // samples
            tiling,                                   // VkImageTiling
            usageFlags,                               // VkImageUsageFlags
            VK_SHARING_MODE_EXCLUSIVE,                // VkSharingMode
            0,                                        // queueFamilyCount
            0,                                        // pQueueFamilyIndices
            VK_IMAGE_LAYOUT_UNDEFINED,                // initialLayout
    };

    VkImage image;
    err = VK_CALL(CreateImage(device, &imageCreateInfo, nullptr, &image));
    if (VK_SUCCESS != err) {
        return GrBackendTexture();
    }

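    // Find a device-local memory type that the buffer permits; hwbProps.memoryTypeBits is a
    // bitmask of the memory type indices the imported buffer may be bound with.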
    VkPhysicalDeviceMemoryProperties2 phyDevMemProps;
    phyDevMemProps.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
    phyDevMemProps.pNext = nullptr;

    uint32_t typeIndex = 0;
    uint32_t heapIndex = 0;
    bool foundHeap = false;
    VK_CALL(GetPhysicalDeviceMemoryProperties2(physicalDevice, &phyDevMemProps));
    uint32_t memTypeCnt = phyDevMemProps.memoryProperties.memoryTypeCount;
    for (uint32_t i = 0; i < memTypeCnt && !foundHeap; ++i) {
        if (hwbProps.memoryTypeBits & (1 << i)) {
            const VkPhysicalDeviceMemoryProperties& pdmp = phyDevMemProps.memoryProperties;
            uint32_t supportedFlags = pdmp.memoryTypes[i].propertyFlags &
                                      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            if (supportedFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                typeIndex = i;
                heapIndex = pdmp.memoryTypes[i].heapIndex;
                foundHeap = true;
            }
        }
    }
    if (!foundHeap) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

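    // Import the buffer's memory: the VkImportAndroidHardwareBufferInfoANDROID is chained through
    // a VkMemoryDedicatedAllocateInfo so the allocation is dedicated to the image created above.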
    VkImportAndroidHardwareBufferInfoANDROID hwbImportInfo;
    hwbImportInfo.sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
    hwbImportInfo.pNext = nullptr;
    hwbImportInfo.buffer = hardwareBuffer;

    VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
    dedicatedAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO;
    dedicatedAllocInfo.pNext = &hwbImportInfo;
    dedicatedAllocInfo.image = image;
    dedicatedAllocInfo.buffer = VK_NULL_HANDLE;

    VkMemoryAllocateInfo allocInfo = {
            VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,  // sType
            &dedicatedAllocInfo,                     // pNext
            hwbProps.allocationSize,                 // allocationSize
            typeIndex,                               // memoryTypeIndex
    };

    VkDeviceMemory memory;

    err = VK_CALL(AllocateMemory(device, &allocInfo, nullptr, &memory));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        return GrBackendTexture();
    }

    VkBindImageMemoryInfo bindImageInfo;
    bindImageInfo.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
    bindImageInfo.pNext = nullptr;
    bindImageInfo.image = image;
    bindImageInfo.memory = memory;
    bindImageInfo.memoryOffset = 0;

    err = VK_CALL(BindImageMemory2(device, 1, &bindImageInfo));
    if (VK_SUCCESS != err) {
        VK_CALL(DestroyImage(device, image, nullptr));
        VK_CALL(FreeMemory(device, memory, nullptr));
        return GrBackendTexture();
    }

    GrVkImageInfo imageInfo;

    imageInfo.fImage = image;
    imageInfo.fAlloc = GrVkAlloc(memory, 0, hwbProps.allocationSize, 0);
    imageInfo.fImageTiling = tiling;
    imageInfo.fImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    imageInfo.fFormat = format;
    imageInfo.fLevelCount = 1;
    // TODO: This should possibly be VK_QUEUE_FAMILY_FOREIGN_EXT but current Adreno devices do not
    // support that extension. Or if we know the source of the AHardwareBuffer is not from a
    // "foreign" device we can leave them as external.
    imageInfo.fCurrentQueueFamily = VK_QUEUE_FAMILY_EXTERNAL;
    imageInfo.fYcbcrConversionInfo = *ycbcrConversion;

    *deleteProc = GrAHardwareBufferImageGenerator::DeleteVkImage;
    *deleteCtx = new VulkanCleanupHelper(gpu, image, memory);

    return GrBackendTexture(width, height, imageInfo);
}
#endif

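// Owns the cleanup of the GL import: deletes the texture and destroys the EGLImage that was
// created from the AHardwareBuffer.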
class GLCleanupHelper {
public:
    GLCleanupHelper(GrGLuint texID, EGLImageKHR image, EGLDisplay display)
        : fTexID(texID)
        , fImage(image)
        , fDisplay(display) { }
    ~GLCleanupHelper() {
        glDeleteTextures(1, &fTexID);
        // eglDestroyImageKHR will remove a ref from the AHardwareBuffer
        eglDestroyImageKHR(fDisplay, fImage);
    }
private:
    GrGLuint fTexID;
    EGLImageKHR fImage;
    EGLDisplay fDisplay;
};

void GrAHardwareBufferImageGenerator::DeleteGLTexture(void* context) {
    GLCleanupHelper* cleanupHelper = static_cast<GLCleanupHelper*>(context);
    delete cleanupHelper;
}

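// Imports the AHardwareBuffer into GL and wraps it in a GrBackendTexture: the buffer is turned
// into an EGLImage via eglGetNativeClientBufferANDROID/eglCreateImageKHR, which is then bound to
// a freshly created GL_TEXTURE_EXTERNAL_OES texture with glEGLImageTargetTexture2DOES.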
static GrBackendTexture make_gl_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    while (GL_NO_ERROR != glGetError()) {}  // clear GL errors

    EGLClientBuffer clientBuffer = eglGetNativeClientBufferANDROID(hardwareBuffer);
    EGLint attribs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE,
                         isProtectedContent ? EGL_PROTECTED_CONTENT_EXT : EGL_NONE,
                         isProtectedContent ? EGL_TRUE : EGL_NONE,
                         EGL_NONE };
    EGLDisplay display = eglGetCurrentDisplay();
    // eglCreateImageKHR will add a ref to the AHardwareBuffer
    EGLImageKHR image = eglCreateImageKHR(display, EGL_NO_CONTEXT, EGL_NATIVE_BUFFER_ANDROID,
                                          clientBuffer, attribs);
    if (EGL_NO_IMAGE_KHR == image) {
        SkDebugf("Could not create EGL image, err = (%#x)", (int) eglGetError());
        return GrBackendTexture();
    }

    GrGLuint texID;
    glGenTextures(1, &texID);
    if (!texID) {
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glBindTexture(GL_TEXTURE_EXTERNAL_OES, texID);
    GLenum status = GL_NO_ERROR;
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glBindTexture failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, image);
    if ((status = glGetError()) != GL_NO_ERROR) {
        SkDebugf("glEGLImageTargetTexture2DOES failed (%#x)", (int) status);
        glDeleteTextures(1, &texID);
        eglDestroyImageKHR(display, image);
        return GrBackendTexture();
    }
    context->resetContext(kTextureBinding_GrGLBackendState);

    GrGLTextureInfo textureInfo;
    textureInfo.fID = texID;
    SkASSERT(backendFormat.isValid());
    textureInfo.fTarget = *backendFormat.getGLTarget();
    textureInfo.fFormat = *backendFormat.getGLFormat();

    *deleteProc = GrAHardwareBufferImageGenerator::DeleteGLTexture;
    *deleteCtx = new GLCleanupHelper(texID, image, display);

    return GrBackendTexture(width, height, GrMipMapped::kNo, textureInfo);
}

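// Dispatches to the GL or Vulkan import path based on the context's backend. Protected content
// is only honored on GL, and only when EGL_EXT_protected_content is available.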
static GrBackendTexture make_backend_texture(
        GrContext* context, AHardwareBuffer* hardwareBuffer,
        int width, int height, GrPixelConfig config,
        GrAHardwareBufferImageGenerator::DeleteImageProc* deleteProc,
        GrAHardwareBufferImageGenerator::DeleteImageCtx* deleteCtx,
        bool isProtectedContent,
        const GrBackendFormat& backendFormat) {
    if (context->abandoned()) {
        return GrBackendTexture();
    }
    bool createProtectedImage = isProtectedContent && can_import_protected_content(context);

    if (GrBackendApi::kOpenGL == context->backend()) {
        return make_gl_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat);
    } else {
        SkASSERT(GrBackendApi::kVulkan == context->backend());
#ifdef SK_VULKAN
        // Currently we don't support protected images on Vulkan.
        SkASSERT(!createProtectedImage);
        return make_vk_backend_texture(context, hardwareBuffer, width, height, config, deleteProc,
                                       deleteCtx, createProtectedImage, backendFormat);
#else
        return GrBackendTexture();
#endif
    }
}

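// Maps the AHardwareBuffer's format to a GrBackendFormat for the given backend. On Vulkan, a
// buffer format with no VkFormat equivalent is instead described by a GrVkYcbcrConversionInfo
// built from the driver's suggested YCbCr settings.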
GrBackendFormat get_backend_format(GrContext* context, AHardwareBuffer* hardwareBuffer,
                                   GrBackendApi backend, uint32_t bufferFormat) {
    if (backend == GrBackendApi::kOpenGL) {
        switch (bufferFormat) {
            // TODO: find out if we can detect which graphics buffers support GR_GL_TEXTURE_2D
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeGL(GR_GL_RGBA16F, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB565, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB10_A2, GR_GL_TEXTURE_EXTERNAL);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeGL(GR_GL_RGB8, GR_GL_TEXTURE_EXTERNAL);
            default:
                return GrBackendFormat::MakeGL(GR_GL_RGBA8, GR_GL_TEXTURE_EXTERNAL);
        }
    } else if (backend == GrBackendApi::kVulkan) {
#ifdef SK_VULKAN
        switch (bufferFormat) {
            case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
                return GrBackendFormat::MakeVk(VK_FORMAT_R16G16B16A16_SFLOAT);
            case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R5G6B5_UNORM_PACK16);
            case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_A2B10G10R10_UNORM_PACK32);
            case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8A8_UNORM);
            case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
                return GrBackendFormat::MakeVk(VK_FORMAT_R8G8B8_UNORM);
            default: {
                GrVkGpu* gpu = static_cast<GrVkGpu*>(context->contextPriv().getGpu());
                SkASSERT(gpu);
                VkDevice device = gpu->device();

                if (!gpu->vkCaps().supportsAndroidHWBExternalMemory()) {
                    return GrBackendFormat();
                }
                VkAndroidHardwareBufferFormatPropertiesANDROID hwbFormatProps;
                hwbFormatProps.sType =
                        VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
                hwbFormatProps.pNext = nullptr;

                VkAndroidHardwareBufferPropertiesANDROID hwbProps;
                hwbProps.sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
                hwbProps.pNext = &hwbFormatProps;

                VkResult err = VK_CALL(GetAndroidHardwareBufferProperties(device, hardwareBuffer,
                                                                          &hwbProps));
                if (VK_SUCCESS != err) {
                    return GrBackendFormat();
                }

                if (hwbFormatProps.format != VK_FORMAT_UNDEFINED) {
                    return GrBackendFormat();
                }

                GrVkYcbcrConversionInfo ycbcrConversion;
                ycbcrConversion.fYcbcrModel = hwbFormatProps.suggestedYcbcrModel;
                ycbcrConversion.fYcbcrRange = hwbFormatProps.suggestedYcbcrRange;
                ycbcrConversion.fXChromaOffset = hwbFormatProps.suggestedXChromaOffset;
                ycbcrConversion.fYChromaOffset = hwbFormatProps.suggestedYChromaOffset;
                ycbcrConversion.fForceExplicitReconstruction = VK_FALSE;
                ycbcrConversion.fExternalFormat = hwbFormatProps.externalFormat;
                ycbcrConversion.fExternalFormatFeatures = hwbFormatProps.formatFeatures;
                if (VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT &
                    hwbFormatProps.formatFeatures) {
                    ycbcrConversion.fChromaFilter = VK_FILTER_LINEAR;
                } else {
                    ycbcrConversion.fChromaFilter = VK_FILTER_NEAREST;
                }

                return GrBackendFormat::MakeVk(ycbcrConversion);
            }
        }
#else
        return GrBackendFormat();
#endif
    }
    return GrBackendFormat();
}

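// Creates a lazily instantiated texture proxy for the hardware buffer; the backend texture is
// only imported (via make_backend_texture) when the proxy is instantiated.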
sk_sp<GrTextureProxy> GrAHardwareBufferImageGenerator::makeProxy(GrContext* context) {
    if (context->abandoned()) {
        return nullptr;
    }

    GrBackendFormat backendFormat = get_backend_format(context, fHardwareBuffer,
                                                       context->backend(),
                                                       fBufferFormat);
    GrPixelConfig pixelConfig = context->contextPriv().caps()->getConfigFromBackendFormat(
            backendFormat, this->getInfo().colorType());

    if (pixelConfig == kUnknown_GrPixelConfig) {
        return nullptr;
    }

    int width = this->getInfo().width();
    int height = this->getInfo().height();

    GrSurfaceDesc desc;
    desc.fWidth = width;
    desc.fHeight = height;
    desc.fConfig = pixelConfig;

    GrTextureType textureType = GrTextureType::k2D;
    if (context->backend() == GrBackendApi::kOpenGL) {
        textureType = GrTextureType::kExternal;
    } else if (context->backend() == GrBackendApi::kVulkan) {
        const VkFormat* format = backendFormat.getVkFormat();
        SkASSERT(format);
        if (*format == VK_FORMAT_UNDEFINED) {
            textureType = GrTextureType::kExternal;
        }
    }

    auto proxyProvider = context->contextPriv().proxyProvider();

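    // Take an extra ref on the buffer for the lazy instantiation callback; it is released in the
    // callback if no resource provider is supplied, or below if proxy creation fails.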
    AHardwareBuffer* hardwareBuffer = fHardwareBuffer;
    AHardwareBuffer_acquire(hardwareBuffer);

    const bool isProtectedContent = fIsProtectedContent;

    sk_sp<GrTextureProxy> texProxy = proxyProvider->createLazyProxy(
            [context, hardwareBuffer, width, height, pixelConfig, isProtectedContent,
             backendFormat](GrResourceProvider* resourceProvider) {
                if (!resourceProvider) {
                    AHardwareBuffer_release(hardwareBuffer);
                    return sk_sp<GrTexture>();
                }

                DeleteImageProc deleteImageProc = nullptr;
                DeleteImageCtx deleteImageCtx = nullptr;

                GrBackendTexture backendTex = make_backend_texture(context, hardwareBuffer,
                                                                   width, height, pixelConfig,
                                                                   &deleteImageProc,
                                                                   &deleteImageCtx,
                                                                   isProtectedContent,
                                                                   backendFormat);
                if (!backendTex.isValid()) {
                    return sk_sp<GrTexture>();
                }
                SkASSERT(deleteImageProc && deleteImageCtx);

                backendTex.fConfig = pixelConfig;
                // We make this texture cacheable to avoid recreating a GrTexture every time this
                // is invoked. We know the owning SkImage will send an invalidation message when
                // the image is destroyed, so the texture will be removed at that time.
                sk_sp<GrTexture> tex = resourceProvider->wrapBackendTexture(
                        backendTex, kBorrow_GrWrapOwnership, GrWrapCacheable::kYes,
                        kRead_GrIOType);
                if (!tex) {
                    deleteImageProc(deleteImageCtx);
                    return sk_sp<GrTexture>();
                }

                if (deleteImageProc) {
                    sk_sp<GrReleaseProcHelper> releaseProcHelper(
                            new GrReleaseProcHelper(deleteImageProc, deleteImageCtx));
                    tex->setRelease(releaseProcHelper);
                }

                return tex;
            },
            backendFormat, desc, fSurfaceOrigin, GrMipMapped::kNo,
            GrInternalSurfaceFlags::kReadOnly, SkBackingFit::kExact, SkBudgeted::kNo);

    if (!texProxy) {
        AHardwareBuffer_release(hardwareBuffer);
    }
    return texProxy;
}

sk_sp<GrTextureProxy> GrAHardwareBufferImageGenerator::onGenerateTexture(
        GrContext* context, const SkImageInfo& info, const SkIPoint& origin, bool willNeedMipMaps) {
    sk_sp<GrTextureProxy> texProxy = this->makeProxy(context);
    if (!texProxy) {
        return nullptr;
    }

    if (0 == origin.fX && 0 == origin.fY &&
        info.width() == this->getInfo().width() && info.height() == this->getInfo().height()) {
        // If the caller wants the full texture we're done. The caller will handle making a copy
        // for mip maps if that is required.
        return texProxy;
    }
    // Otherwise, make a copy for the requested subset.
    SkIRect subset = SkIRect::MakeXYWH(origin.fX, origin.fY, info.width(), info.height());

    GrMipMapped mipMapped = willNeedMipMaps ? GrMipMapped::kYes : GrMipMapped::kNo;

    return GrSurfaceProxy::Copy(context, texProxy.get(), mipMapped, subset, SkBackingFit::kExact,
                                SkBudgeted::kYes);
}

bool GrAHardwareBufferImageGenerator::onIsValid(GrContext* context) const {
    if (nullptr == context) {
        return false;  // The CPU backend is not supported, because the hardware buffer can be swizzled.
    }
    return GrBackendApi::kOpenGL == context->backend() ||
           GrBackendApi::kVulkan == context->backend();
}

#endif  // defined(SK_BUILD_FOR_ANDROID) && __ANDROID_API__ >= 26