// Copyright 2018 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "VkDecoderGlobalState.h"

#include <algorithm>
#include <functional>
#include <list>
#include <memory>
#include <mutex>
#include <unordered_map>
#include <vector>

#include "BlobManager.h"
#include "FrameBuffer.h"
#include "RenderThreadInfoVk.h"
#include "VkAndroidNativeBuffer.h"
#include "VkCommonOperations.h"
#include "VkDecoderContext.h"
#include "VkDecoderInternalStructs.h"
#include "VkDecoderSnapshot.h"
#include "VkDecoderSnapshotUtils.h"
#include "VkEmulatedPhysicalDeviceMemory.h"
#include "VulkanDispatch.h"
#include "VulkanStream.h"
#include "aemu/base/ManagedDescriptor.hpp"
#include "aemu/base/Optional.h"
#include "aemu/base/Tracing.h"
#include "aemu/base/containers/EntityManager.h"
#include "aemu/base/containers/HybridEntityManager.h"
#include "aemu/base/containers/Lookup.h"
#include "aemu/base/files/Stream.h"
#include "aemu/base/memory/SharedMemory.h"
#include "aemu/base/synchronization/ConditionVariable.h"
#include "aemu/base/synchronization/Lock.h"
#include "aemu/base/system/System.h"
#include "common/goldfish_vk_deepcopy.h"
#include "common/goldfish_vk_dispatch.h"
#include "common/goldfish_vk_marshaling.h"
#include "common/goldfish_vk_reserved_marshaling.h"
#include "compressedTextureFormats/AstcCpuDecompressor.h"
#include "host-common/GfxstreamFatalError.h"
#include "host-common/HostmemIdMapping.h"
#include "host-common/address_space_device_control_ops.h"
#include "host-common/emugl_vm_operations.h"
#include "host-common/vm_operations.h"
#include "utils/RenderDoc.h"
#include "vk_util.h"
#include "vulkan/VkFormatUtils.h"
#include "vulkan/emulated_textures/AstcTexture.h"
#include "vulkan/emulated_textures/CompressedImageInfo.h"
#include "vulkan/emulated_textures/GpuDecompressionPipeline.h"
#include "vulkan/vk_enum_string_helper.h"

#ifndef _WIN32
#include <unistd.h>
#endif

#ifdef __APPLE__
#include <CoreFoundation/CoreFoundation.h>
#include <vulkan/vulkan_beta.h> // for MoltenVK portability extensions
#endif

#include <climits>

namespace gfxstream {
namespace vk {

using android::base::AutoLock;
using android::base::ConditionVariable;
using android::base::DescriptorType;
using android::base::Lock;
using android::base::ManagedDescriptor;
using android::base::MetricEventBadPacketLength;
using android::base::MetricEventDuplicateSequenceNum;
using android::base::MetricEventVulkanOutOfMemory;
using android::base::Optional;
using android::base::SharedMemory;
using android::base::StaticLock;
using android::emulation::ManagedDescriptorInfo;
using emugl::ABORT_REASON_OTHER;
using emugl::FatalError;
using emugl::GfxApiLogger;
using gfxstream::BlobManager;
using gfxstream::VulkanInfo;

// TODO(b/261477138): Move to a shared aemu definition
#define __ALIGN_MASK(x, mask) (((x) + (mask)) & ~(mask))
#define __ALIGN(x, a) __ALIGN_MASK(x, (__typeof__(x))(a)-1)
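// For example, __ALIGN(4097, 4096) == 8192 and __ALIGN(4096, 4096) == 4096;
// the mask trick only works when the alignment is a power of two.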

// TODO: Asserts build
#define DCHECK(condition) (void)(condition);

#define VKDGS_DEBUG 0

#if VKDGS_DEBUG
#define VKDGS_LOG(fmt, ...) fprintf(stderr, "%s:%d " fmt "\n", __func__, __LINE__, ##__VA_ARGS__);
#else
#define VKDGS_LOG(fmt, ...)
#endif

// Blob mem
#define STREAM_BLOB_MEM_GUEST 1
#define STREAM_BLOB_MEM_HOST3D 2
#define STREAM_BLOB_MEM_HOST3D_GUEST 3

// Blob flags
#define STREAM_BLOB_FLAG_USE_MAPPABLE 1
#define STREAM_BLOB_FLAG_USE_SHAREABLE 2
#define STREAM_BLOB_FLAG_USE_CROSS_DEVICE 4
#define STREAM_BLOB_FLAG_CREATE_GUEST_HANDLE 8
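// These values appear to mirror the virtio-gpu blob memory types and blob
// flags used by the guest when creating resource blobs; presumably they are
// duplicated here so this file does not need to pull in virtio-gpu headers.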

#define VALIDATE_REQUIRED_HANDLE(parameter) \
    validateRequiredHandle(__FUNCTION__, #parameter, parameter)

template <typename T>
void validateRequiredHandle(const char* api_name, const char* parameter_name, T value) {
    if (value == VK_NULL_HANDLE) {
        GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << api_name << ":" << parameter_name;
    }
}

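// Duplicates an external sync handle so the copy can outlive the original.
// The caller owns the returned handle and is expected to close it
// (CloseHandle on Windows, close() elsewhere) when done.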
VK_EXT_SYNC_HANDLE dupExternalSync(VK_EXT_SYNC_HANDLE h) {
#ifdef _WIN32
    auto myProcessHandle = GetCurrentProcess();
    VK_EXT_SYNC_HANDLE res;
    DuplicateHandle(myProcessHandle, h,     // source process and handle
                    myProcessHandle, &res,  // target process and pointer to handle
                    0 /* desired access (ignored) */, true /* inherit */,
                    DUPLICATE_SAME_ACCESS /* same access option */);
    return res;
#else
    return dup(h);
#endif
}

// A list of device extensions that should not be passed to the host driver.
// These are mainly Vulkan features that we emulate ourselves.
static constexpr const char* const kEmulatedDeviceExtensions[] = {
    "VK_ANDROID_external_memory_android_hardware_buffer",
    "VK_ANDROID_native_buffer",
    "VK_FUCHSIA_buffer_collection",
    "VK_FUCHSIA_external_memory",
    "VK_FUCHSIA_external_semaphore",
    VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME,
    VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME,
    VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME,
    VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME,
    VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME,
    VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
#if defined(__QNX__)
    VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME,
    VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME,
#endif
};

// A list of instance extensions that should not be passed to the host driver.
// On older pre-1.1 Vulkan platforms, gfxstream emulates these features.
static constexpr const char* const kEmulatedInstanceExtensions[] = {
    VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME,
    VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME,
    VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME,
};

static constexpr uint32_t kMaxSafeVersion = VK_MAKE_VERSION(1, 3, 0);
static constexpr uint32_t kMinVersion = VK_MAKE_VERSION(1, 0, 0);

static constexpr uint64_t kPageSizeforBlob = 4096;
static constexpr uint64_t kPageMaskForBlob = ~(0xfff);
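// Note: kPageMaskForBlob == ~(kPageSizeforBlob - 1); masking with it rounds an
// address or size down to a 4 KiB page boundary.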

static uint64_t hostBlobId = 0;

// b/319729462
// On snapshot load, thread-local data is not available, so we use a
// fake context ID. We will eventually need to fix this once we start using
// snapshots with virtio.
static uint32_t kTemporaryContextIdForSnapshotLoading = 1;

#define DEFINE_BOXED_HANDLE_TYPE_TAG(type) Tag_##type,

enum BoxedHandleTypeTag {
    Tag_Invalid = 0,
    GOLDFISH_VK_LIST_HANDLE_TYPES_BY_STAGE(DEFINE_BOXED_HANDLE_TYPE_TAG)
};

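// Roughly, the decoder hands the guest "boxed" handles: 64-bit ids allocated by
// sBoxedHandleManager (below), each tagged with its handle type and mapped to a
// DispatchableHandleInfo that stores the real ("unboxed") driver handle plus,
// for dispatchable types, the dispatch table to use. Incoming calls unbox the
// guest handle before reaching the host driver, and newly created driver
// handles are boxed before being returned to the guest.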
template <class T>
class BoxedHandleManager {
   public:
    // The hybrid entity manager uses a sequence lock to protect access to
    // a working set of 16000 handles, allowing us to avoid using a regular
    // lock for those. Performance is degraded when going over this number,
    // as it will then fall back to a std::map.
    //
    // We use 16000 as the max number of live handles to track; we don't
    // expect the system to go over 16000 total live handles, outside some
    // dEQP object management tests.
    using Store = android::base::HybridEntityManager<16000, uint64_t, T>;

    Lock lock;
    mutable Store store;
    std::unordered_map<uint64_t, uint64_t> reverseMap;
    struct DelayedRemove {
        uint64_t handle;
        std::function<void()> callback;
    };
    std::unordered_map<VkDevice, std::vector<DelayedRemove>> delayedRemoves;

    void clear() {
        reverseMap.clear();
        store.clear();
    }

    uint64_t add(const T& item, BoxedHandleTypeTag tag) {
        auto res = (uint64_t)store.add(item, (size_t)tag);
        AutoLock l(lock);
        reverseMap[(uint64_t)(item.underlying)] = res;
        return res;
    }

    uint64_t addFixed(uint64_t handle, const T& item, BoxedHandleTypeTag tag) {
        auto res = (uint64_t)store.addFixed(handle, item, (size_t)tag);
        AutoLock l(lock);
        reverseMap[(uint64_t)(item.underlying)] = res;
        return res;
    }

    void update(uint64_t handle, const T& item, BoxedHandleTypeTag tag) {
        auto storedItem = store.get(handle);
        uint64_t oldHandle = (uint64_t)storedItem->underlying;
        *storedItem = item;
        AutoLock l(lock);
        if (oldHandle) {
            reverseMap.erase(oldHandle);
        }
        reverseMap[(uint64_t)(item.underlying)] = handle;
    }

    void remove(uint64_t h) {
        auto item = get(h);
        if (item) {
            AutoLock l(lock);
            reverseMap.erase((uint64_t)(item->underlying));
        }
        store.remove(h);
    }

    void removeDelayed(uint64_t h, VkDevice device, std::function<void()> callback) {
        AutoLock l(lock);
        delayedRemoves[device].push_back({h, callback});
    }

    void processDelayedRemovesGlobalStateLocked(VkDevice device) {
        AutoLock l(lock);
        auto it = delayedRemoves.find(device);
        if (it == delayedRemoves.end()) return;
        auto& delayedRemovesList = it->second;
        for (const auto& r : delayedRemovesList) {
            auto h = r.handle;
            // VkDecoderGlobalState is already locked when callback is called.
            if (r.callback) {
                r.callback();
            }
            store.remove(h);
        }
        delayedRemovesList.clear();
        delayedRemoves.erase(it);
    }

    T* get(uint64_t h) { return (T*)store.get_const(h); }

    uint64_t getBoxedFromUnboxedLocked(uint64_t unboxed) {
        auto* res = android::base::find(reverseMap, unboxed);
        if (!res) return 0;
        return *res;
    }
};
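// Typical usage (a sketch based on the methods above): creation paths call
// add() and return the resulting boxed id to the guest, lookups go through
// get() or getBoxedFromUnboxedLocked(), and destruction either calls remove()
// directly or removeDelayed() so the cleanup callback runs later, when
// processDelayedRemovesGlobalStateLocked() is invoked for the owning VkDevice.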

struct OrderMaintenanceInfo {
    uint32_t sequenceNumber = 0;
    Lock lock;
    ConditionVariable cv;

    uint32_t refcount = 1;

    void incRef() { __atomic_add_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }

    bool decRef() { return 0 == __atomic_sub_fetch(&refcount, 1, __ATOMIC_SEQ_CST); }
};

static void acquireOrderMaintInfo(OrderMaintenanceInfo* ord) {
    if (!ord) return;
    ord->incRef();
}

static void releaseOrderMaintInfo(OrderMaintenanceInfo* ord) {
    if (!ord) return;
    if (ord->decRef()) delete ord;
}

template <class T>
class DispatchableHandleInfo {
   public:
    T underlying;
    VulkanDispatch* dispatch = nullptr;
    bool ownDispatch = false;
    OrderMaintenanceInfo* ordMaintInfo = nullptr;
    VulkanMemReadingStream* readStream = nullptr;
};

static BoxedHandleManager<DispatchableHandleInfo<uint64_t>> sBoxedHandleManager;

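// A small pool of reusable read streams: pop() hands back a previously pushed
// stream when one is available, so decoder threads can recycle
// VulkanMemReadingStream objects instead of allocating a new one each time.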
struct ReadStreamRegistry {
    Lock mLock;

    std::vector<VulkanMemReadingStream*> freeStreams;

    ReadStreamRegistry() { freeStreams.reserve(100); };

    VulkanMemReadingStream* pop(const gfxstream::host::FeatureSet& features) {
        AutoLock lock(mLock);
        if (freeStreams.empty()) {
            return new VulkanMemReadingStream(nullptr, features);
        } else {
            VulkanMemReadingStream* res = freeStreams.back();
            freeStreams.pop_back();
            return res;
        }
    }

    void push(VulkanMemReadingStream* stream) {
        AutoLock lock(mLock);
        freeStreams.push_back(stream);
    }
};

static ReadStreamRegistry sReadStreamRegistry;

class VkDecoderGlobalState::Impl {
   public:
    Impl()
        : m_vk(vkDispatch()),
          m_emu(getGlobalVkEmulation()),
          mRenderDocWithMultipleVkInstances(m_emu->guestRenderDoc.get()) {
        mSnapshotsEnabled = m_emu->features.VulkanSnapshots.enabled;
        mVkCleanupEnabled =
            android::base::getEnvironmentVariable("ANDROID_EMU_VK_NO_CLEANUP") != "1";
        mLogging = android::base::getEnvironmentVariable("ANDROID_EMU_VK_LOG_CALLS") == "1";
        mVerbosePrints = android::base::getEnvironmentVariable("ANDROID_EMUGL_VERBOSE") == "1";
        if (get_emugl_address_space_device_control_ops().control_get_hw_funcs &&
            get_emugl_address_space_device_control_ops().control_get_hw_funcs()) {
            mUseOldMemoryCleanupPath = 0 == get_emugl_address_space_device_control_ops()
                                                .control_get_hw_funcs()
                                                ->getPhysAddrStartLocked();
        }
        mGuestUsesAngle = m_emu->features.GuestUsesAngle.enabled;
    }

    ~Impl() = default;

    // Resets all internal tracking info.
    // Assumes that the heavyweight cleanup operations
    // have already happened.
    void clear() {
        mInstanceInfo.clear();
        mPhysdevInfo.clear();
        mDeviceInfo.clear();
        mImageInfo.clear();
        mImageViewInfo.clear();
        mSamplerInfo.clear();
        mCmdBufferInfo.clear();
        mCmdPoolInfo.clear();
        mDeviceToPhysicalDevice.clear();
        mPhysicalDeviceToInstance.clear();
        mQueueInfo.clear();
        mBufferInfo.clear();
        mMemoryInfo.clear();
        mShaderModuleInfo.clear();
        mPipelineCacheInfo.clear();
        mPipelineInfo.clear();
        mRenderPassInfo.clear();
        mFramebufferInfo.clear();
        mSemaphoreInfo.clear();
        mFenceInfo.clear();
#ifdef _WIN32
        mSemaphoreId = 1;
        mExternalSemaphoresById.clear();
#endif
        mDescriptorUpdateTemplateInfo.clear();

        mCreatedHandlesForSnapshotLoad.clear();
        mCreatedHandlesForSnapshotLoadIndex = 0;

        sBoxedHandleManager.clear();
    }

    bool snapshotsEnabled() const { return mSnapshotsEnabled; }

    bool vkCleanupEnabled() const { return mVkCleanupEnabled; }

    const gfxstream::host::FeatureSet& getFeatures() const { return m_emu->features; }

    StateBlock createSnapshotStateBlock(VkDevice unboxed_device) {
            const auto& device = unboxed_device;
            const auto& deviceInfo = android::base::find(mDeviceInfo, device);
            const auto physicalDevice = deviceInfo->physicalDevice;
            const auto& physicalDeviceInfo = android::base::find(mPhysdevInfo, physicalDevice);
            const auto& instanceInfo = android::base::find(mInstanceInfo, physicalDeviceInfo->instance);

            VulkanDispatch* ivk = dispatch_VkInstance(instanceInfo->boxed);
            VulkanDispatch* dvk = dispatch_VkDevice(deviceInfo->boxed);

            StateBlock stateBlock{
                .physicalDevice = physicalDevice,
                .physicalDeviceInfo = physicalDeviceInfo,
                .device = device,
                .deviceDispatch = dvk,
                .queue = VK_NULL_HANDLE,
                .commandPool = VK_NULL_HANDLE,
            };

            uint32_t queueFamilyCount = 0;
            ivk->vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, &queueFamilyCount,
                                                          nullptr);
            std::vector<VkQueueFamilyProperties> queueFamilyProps(queueFamilyCount);
            ivk->vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, &queueFamilyCount,
                                                          queueFamilyProps.data());
            uint32_t queueFamilyIndex = 0;
            for (auto queue : deviceInfo->queues) {
                int idx = queue.first;
                if ((queueFamilyProps[idx].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0) {
                    continue;
                }
                stateBlock.queue = queue.second[0];
                queueFamilyIndex = idx;
                break;
            }

            VkCommandPoolCreateInfo commandPoolCi = {
                VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
                0,
                VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
                queueFamilyIndex,
            };
            dvk->vkCreateCommandPool(device, &commandPoolCi, nullptr, &stateBlock.commandPool);
            return stateBlock;
    }

    void releaseSnapshotStateBlock(const StateBlock* stateBlock) {
        stateBlock->deviceDispatch->vkDestroyCommandPool(stateBlock->device, stateBlock->commandPool, nullptr);
    }

    void save(android::base::Stream* stream) {
        mSnapshotState = SnapshotState::Saving;
        snapshot()->save(stream);
        // Save mapped memory
        uint32_t memoryCount = 0;
        for (const auto& it : mMemoryInfo) {
            if (it.second.ptr) {
                memoryCount++;
            }
        }
        stream->putBe32(memoryCount);
        for (const auto& it : mMemoryInfo) {
            if (!it.second.ptr) {
                continue;
            }
            stream->putBe64(reinterpret_cast<uint64_t>(
                unboxed_to_boxed_non_dispatchable_VkDeviceMemory(it.first)));
            stream->putBe64(it.second.size);
            stream->write(it.second.ptr, it.second.size);
        }
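        // Mapped-memory section layout (consumed in the same order by load()):
        // a be32 count of host-mapped allocations, then for each one the boxed
        // VkDeviceMemory handle (be64), its size (be64), and the raw mapped bytes.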

        // Set up VK structs to snapshot other Vulkan objects
        // TODO(b/323064243): group all images from the same device and reuse queue / command pool

        std::vector<VkImage> sortedBoxedImages;
        for (const auto& imageIte : mImageInfo) {
            sortedBoxedImages.push_back(unboxed_to_boxed_non_dispatchable_VkImage(imageIte.first));
        }
        // Image contents need to be saved and loaded in the same order.
        // So sort them (by boxed handles) first.
        std::sort(sortedBoxedImages.begin(), sortedBoxedImages.end());
        for (const auto& boxedImage : sortedBoxedImages) {
            auto unboxedImage = unbox_VkImage(boxedImage);
            const ImageInfo& imageInfo = mImageInfo[unboxedImage];
            if (imageInfo.memory == VK_NULL_HANDLE) {
                continue;
            }
            // Vulkan command playback doesn't recover image layout. We need to do it here.
            stream->putBe32(imageInfo.layout);

            StateBlock stateBlock = createSnapshotStateBlock(imageInfo.device);
            // TODO(b/294277842): make sure the queue is empty before using.
            saveImageContent(stream, &stateBlock, unboxedImage, &imageInfo);
            releaseSnapshotStateBlock(&stateBlock);
        }

        // snapshot buffers
        std::vector<VkBuffer> sortedBoxedBuffers;
        for (const auto& bufferIte : mBufferInfo) {
            sortedBoxedBuffers.push_back(
                unboxed_to_boxed_non_dispatchable_VkBuffer(bufferIte.first));
        }
        sort(sortedBoxedBuffers.begin(), sortedBoxedBuffers.end());
        for (const auto& boxedBuffer : sortedBoxedBuffers) {
            auto unboxedBuffer = unbox_VkBuffer(boxedBuffer);
            const BufferInfo& bufferInfo = mBufferInfo[unboxedBuffer];
            if (bufferInfo.memory == VK_NULL_HANDLE) {
                continue;
            }
            // TODO: add a special case for host mapped memory
            StateBlock stateBlock = createSnapshotStateBlock(bufferInfo.device);

            // TODO(b/294277842): make sure the queue is empty before using.
            saveBufferContent(stream, &stateBlock, unboxedBuffer, &bufferInfo);
            releaseSnapshotStateBlock(&stateBlock);
        }

        // snapshot descriptors
        std::vector<VkDescriptorPool> sortedBoxedDescriptorPools;
        for (const auto& descriptorPoolIte : mDescriptorPoolInfo) {
            auto boxed =
                unboxed_to_boxed_non_dispatchable_VkDescriptorPool(descriptorPoolIte.first);
            sortedBoxedDescriptorPools.push_back(boxed);
        }
        std::sort(sortedBoxedDescriptorPools.begin(), sortedBoxedDescriptorPools.end());
        for (const auto& boxedDescriptorPool : sortedBoxedDescriptorPools) {
            auto unboxedDescriptorPool = unbox_VkDescriptorPool(boxedDescriptorPool);
            const DescriptorPoolInfo& poolInfo = mDescriptorPoolInfo[unboxedDescriptorPool];

            for (uint64_t poolId : poolInfo.poolIds) {
                DispatchableHandleInfo<uint64_t>* setHandleInfo = sBoxedHandleManager.get(poolId);
                bool allocated = setHandleInfo->underlying != 0;
                stream->putByte(allocated);
                if (!allocated) {
                    continue;
                }

                const DescriptorSetInfo& descriptorSetInfo =
                    mDescriptorSetInfo[(VkDescriptorSet)setHandleInfo->underlying];
                VkDescriptorSetLayout boxedLayout =
                    unboxed_to_boxed_non_dispatchable_VkDescriptorSetLayout(
                        descriptorSetInfo.unboxedLayout);
                stream->putBe64((uint64_t)boxedLayout);
                // Count all valid descriptors.
                //
                // There is a use case where the user creates an image, writes it to a
                // descriptor, reads/writes the image by committing a command, then deletes
                // the image without unbinding the descriptor. For example:
                //
                // T1: create "vkimage1" (original)
                // T2: update binding1 of vkdescriptorset1 with vkimage1
                // T3: draw
                // T4: delete "vkimage1" (original)
                // T5: create "vkimage1" (recycled)
                // T6: snapshot load
                //
                // At the point of the snapshot, the original vk image has been invalidated,
                // thus we cannot call vkUpdateDescriptorSets for it, and need to remove it
                // from the snapshot.
                //
                // The current implementation is based on smart pointers. A descriptor set
                // info holds weak pointers to its underlying resources (image, image view,
                // buffer). On snapshot load, we check whether any of those weak pointers
                // have been invalidated.
                //
                // An alternative approach was discussed: instead of using smart pointers,
                // check for valid handles on snapshot save. That approach has the advantage
                // of reducing the number of smart pointer allocations. After discussion we
                // concluded that at least one corner case breaks it: when the user deletes a
                // bound vkimage and creates a new one, the driver is free to reuse released
                // handles, so we might end up with a new vkimage that has the same handle as
                // the old one (see T5 in the example) and think the binding is still valid.
                // And if we bind the new image regardless, we might hit a Vulkan validation
                // error because the new image might have "usage" flags that are unsuitable
                // for binding to descriptors.
                std::vector<std::pair<int, int>> validWriteIndices;
                for (int bindingIdx = 0; bindingIdx < descriptorSetInfo.allWrites.size();
                     bindingIdx++) {
                    for (int bindingElemIdx = 0;
                         bindingElemIdx < descriptorSetInfo.allWrites[bindingIdx].size();
                         bindingElemIdx++) {
                        const auto& entry = descriptorSetInfo.allWrites[bindingIdx][bindingElemIdx];
                        if (entry.writeType == DescriptorSetInfo::DescriptorWriteType::Empty) {
                            continue;
                        }
                        int dependencyObjCount =
                            descriptorDependencyObjectCount(entry.descriptorType);
                        if (entry.alives.size() < dependencyObjCount) {
                            continue;
                        }
                        bool isValid = true;
                        for (const auto& alive : entry.alives) {
                            isValid &= !alive.expired();
                            if (!isValid) {
                                break;
                            }
                        }
                        if (!isValid) {
                            continue;
                        }
                        validWriteIndices.push_back(std::make_pair(bindingIdx, bindingElemIdx));
                    }
                }
                stream->putBe64(validWriteIndices.size());
                // Save all valid descriptors
                for (const auto& idx : validWriteIndices) {
                    const auto& entry = descriptorSetInfo.allWrites[idx.first][idx.second];
                    stream->putBe32(idx.first);
                    stream->putBe32(idx.second);
                    stream->putBe32(entry.writeType);
                    // entry.descriptorType might be redundant.
                    stream->putBe32(entry.descriptorType);
                    switch (entry.writeType) {
                        case DescriptorSetInfo::DescriptorWriteType::ImageInfo: {
                            VkDescriptorImageInfo imageInfo = entry.imageInfo;
                            // Convert to the boxed versions before writing.
                            imageInfo.imageView =
                                descriptorTypeContainsImage(entry.descriptorType)
                                    ? unboxed_to_boxed_non_dispatchable_VkImageView(
                                          imageInfo.imageView)
                                    : VK_NULL_HANDLE;
                            imageInfo.sampler =
                                descriptorTypeContainsSampler(entry.descriptorType)
                                    ? unboxed_to_boxed_non_dispatchable_VkSampler(imageInfo.sampler)
                                    : VK_NULL_HANDLE;
                            stream->write(&imageInfo, sizeof(imageInfo));
                        } break;
                        case DescriptorSetInfo::DescriptorWriteType::BufferInfo: {
                            VkDescriptorBufferInfo bufferInfo = entry.bufferInfo;
                            // Write the boxed version of the handle.
                            bufferInfo.buffer =
                                unboxed_to_boxed_non_dispatchable_VkBuffer(bufferInfo.buffer);
                            stream->write(&bufferInfo, sizeof(bufferInfo));
                        } break;
                        case DescriptorSetInfo::DescriptorWriteType::BufferView: {
                            // Write the boxed version of the handle.
                            VkBufferView bufferView =
                                unboxed_to_boxed_non_dispatchable_VkBufferView(entry.bufferView);
                            stream->write(&bufferView, sizeof(bufferView));
                        } break;
                        case DescriptorSetInfo::DescriptorWriteType::InlineUniformBlock:
                        case DescriptorSetInfo::DescriptorWriteType::AccelerationStructure:
                            // TODO
                            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                                << "Encountered pending inline uniform block or acceleration "
                                   "structure "
                                   "desc write, abort (NYI)";
                        default:
                            break;
                    }
                }
            }
        }
        mSnapshotState = SnapshotState::Normal;
    }

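    // load() must consume the stream in exactly the order save() produced it:
    // decoder replay first, then mapped memory, then image contents (with
    // layouts), buffer contents, and finally descriptor sets.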
    void load(android::base::Stream* stream, GfxApiLogger& gfxLogger,
              HealthMonitor<>* healthMonitor) {
        // assume that we already destroyed all instances
        // from FrameBuffer's onLoad method.

        // destroy all current internal data structures
        clear();
        mSnapshotState = SnapshotState::Loading;
        android::base::BumpPool bumpPool;
        // this part will replay in the decoder
        snapshot()->load(stream, gfxLogger, healthMonitor);
        // load mapped memory
        uint32_t memoryCount = stream->getBe32();
        for (uint32_t i = 0; i < memoryCount; i++) {
            VkDeviceMemory boxedMemory = reinterpret_cast<VkDeviceMemory>(stream->getBe64());
            VkDeviceMemory unboxedMemory = unbox_VkDeviceMemory(boxedMemory);
            auto it = mMemoryInfo.find(unboxedMemory);
            if (it == mMemoryInfo.end()) {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "Snapshot load failure: cannot find memory handle for " << boxedMemory;
            }
            VkDeviceSize size = stream->getBe64();
            if (size != it->second.size || !it->second.ptr) {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "Snapshot load failure: memory size does not match for " << boxedMemory;
            }
            stream->read(it->second.ptr, size);
        }
        // Set up VK structs to snapshot other Vulkan objects
        // TODO(b/323064243): group all images from the same device and reuse queue / command pool

        std::vector<VkImage> sortedBoxedImages;
        for (const auto& imageIte : mImageInfo) {
            sortedBoxedImages.push_back(unboxed_to_boxed_non_dispatchable_VkImage(imageIte.first));
        }
        sort(sortedBoxedImages.begin(), sortedBoxedImages.end());
        for (const auto& boxedImage : sortedBoxedImages) {
            auto unboxedImage = unbox_VkImage(boxedImage);
            ImageInfo& imageInfo = mImageInfo[unboxedImage];
            if (imageInfo.memory == VK_NULL_HANDLE) {
                continue;
            }
            // Playback doesn't recover image layout. We need to do it here.
            //
            // Layout transitions were performed with vkCmdPipelineBarrier, but we don't record
            // such commands directly. Instead, we memorize the current layout and issue our own
            // vkCmdPipelineBarrier after load.
            //
            // We do the layout transform in loadImageContent. There are still use cases where it
            // should recover the layout but does not.
            //
            // TODO(b/323059453): fix corner cases when image contents cannot be properly loaded.
            imageInfo.layout = static_cast<VkImageLayout>(stream->getBe32());
            StateBlock stateBlock = createSnapshotStateBlock(imageInfo.device);
            // TODO(b/294277842): make sure the queue is empty before using.
            loadImageContent(stream, &stateBlock, unboxedImage, &imageInfo);
            releaseSnapshotStateBlock(&stateBlock);
        }

        // snapshot buffers
        std::vector<VkBuffer> sortedBoxedBuffers;
        for (const auto& bufferIte : mBufferInfo) {
            sortedBoxedBuffers.push_back(
                unboxed_to_boxed_non_dispatchable_VkBuffer(bufferIte.first));
        }
        sort(sortedBoxedBuffers.begin(), sortedBoxedBuffers.end());
        for (const auto& boxedBuffer : sortedBoxedBuffers) {
            auto unboxedBuffer = unbox_VkBuffer(boxedBuffer);
            const BufferInfo& bufferInfo = mBufferInfo[unboxedBuffer];
            if (bufferInfo.memory == VK_NULL_HANDLE) {
                continue;
            }
            // TODO: add a special case for host mapped memory
            StateBlock stateBlock = createSnapshotStateBlock(bufferInfo.device);
            // TODO(b/294277842): make sure the queue is empty before using.
            loadBufferContent(stream, &stateBlock, unboxedBuffer, &bufferInfo);
            releaseSnapshotStateBlock(&stateBlock);
        }

        // snapshot descriptors
        std::vector<VkDescriptorPool> sortedBoxedDescriptorPools;
        for (const auto& descriptorPoolIte : mDescriptorPoolInfo) {
            auto boxed =
                unboxed_to_boxed_non_dispatchable_VkDescriptorPool(descriptorPoolIte.first);
            sortedBoxedDescriptorPools.push_back(boxed);
        }
        sort(sortedBoxedDescriptorPools.begin(), sortedBoxedDescriptorPools.end());
        for (const auto& boxedDescriptorPool : sortedBoxedDescriptorPools) {
            auto unboxedDescriptorPool = unbox_VkDescriptorPool(boxedDescriptorPool);
            const DescriptorPoolInfo& poolInfo = mDescriptorPoolInfo[unboxedDescriptorPool];

            std::vector<VkDescriptorSetLayout> layouts;
            std::vector<uint64_t> poolIds;
            std::vector<VkWriteDescriptorSet> writeDescriptorSets;
            std::vector<uint32_t> writeStartingIndices;

            // Temporary structures for the pointers in VkWriteDescriptorSet.
            // Use unique_ptr so that the pointers don't change when vector resizes.
            std::vector<std::unique_ptr<VkDescriptorImageInfo>> tmpImageInfos;
            std::vector<std::unique_ptr<VkDescriptorBufferInfo>> tmpBufferInfos;
            std::vector<std::unique_ptr<VkBufferView>> tmpBufferViews;

            for (uint64_t poolId : poolInfo.poolIds) {
                bool allocated = stream->getByte();
                if (!allocated) {
                    continue;
                }
                poolIds.push_back(poolId);
                writeStartingIndices.push_back(writeDescriptorSets.size());
                VkDescriptorSetLayout boxedLayout = (VkDescriptorSetLayout)stream->getBe64();
                layouts.push_back(unbox_VkDescriptorSetLayout(boxedLayout));
                uint64_t validWriteCount = stream->getBe64();
                for (int write = 0; write < validWriteCount; write++) {
                    uint32_t binding = stream->getBe32();
                    uint32_t arrayElement = stream->getBe32();
                    DescriptorSetInfo::DescriptorWriteType writeType =
                        static_cast<DescriptorSetInfo::DescriptorWriteType>(stream->getBe32());
                    VkDescriptorType descriptorType =
                        static_cast<VkDescriptorType>(stream->getBe32());
                    VkWriteDescriptorSet writeDescriptorSet = {
                        .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                        .dstSet = (VkDescriptorSet)poolId,
                        .dstBinding = binding,
                        .dstArrayElement = arrayElement,
                        .descriptorCount = 1,
                        .descriptorType = descriptorType,
                    };
                    switch (writeType) {
                        case DescriptorSetInfo::DescriptorWriteType::ImageInfo: {
                            tmpImageInfos.push_back(std::make_unique<VkDescriptorImageInfo>());
                            writeDescriptorSet.pImageInfo = tmpImageInfos.back().get();
                            VkDescriptorImageInfo& imageInfo = *tmpImageInfos.back();
                            stream->read(&imageInfo, sizeof(imageInfo));
                            imageInfo.imageView = descriptorTypeContainsImage(descriptorType)
                                                      ? unbox_VkImageView(imageInfo.imageView)
                                                      : 0;
                            imageInfo.sampler = descriptorTypeContainsSampler(descriptorType)
                                                    ? unbox_VkSampler(imageInfo.sampler)
                                                    : 0;
                        } break;
                        case DescriptorSetInfo::DescriptorWriteType::BufferInfo: {
                            tmpBufferInfos.push_back(std::make_unique<VkDescriptorBufferInfo>());
                            writeDescriptorSet.pBufferInfo = tmpBufferInfos.back().get();
                            VkDescriptorBufferInfo& bufferInfo = *tmpBufferInfos.back();
                            stream->read(&bufferInfo, sizeof(bufferInfo));
                            bufferInfo.buffer = unbox_VkBuffer(bufferInfo.buffer);
                        } break;
                        case DescriptorSetInfo::DescriptorWriteType::BufferView: {
                            tmpBufferViews.push_back(std::make_unique<VkBufferView>());
                            writeDescriptorSet.pTexelBufferView = tmpBufferViews.back().get();
                            VkBufferView& bufferView = *tmpBufferViews.back();
                            stream->read(&bufferView, sizeof(bufferView));
                            bufferView = unbox_VkBufferView(bufferView);
                        } break;
                        case DescriptorSetInfo::DescriptorWriteType::InlineUniformBlock:
                        case DescriptorSetInfo::DescriptorWriteType::AccelerationStructure:
                            // TODO
                            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                                << "Encountered pending inline uniform block or acceleration "
                                   "structure "
                                   "desc write, abort (NYI)";
                        default:
                            break;
                    }
                    writeDescriptorSets.push_back(writeDescriptorSet);
                }
            }
            std::vector<uint32_t> whichPool(poolIds.size(), 0);
            std::vector<uint32_t> pendingAlloc(poolIds.size(), true);

            const auto& device = poolInfo.device;
            const auto& deviceInfo = android::base::find(mDeviceInfo, device);
            VulkanDispatch* dvk = dispatch_VkDevice(deviceInfo->boxed);
            on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
                &bumpPool, dvk, device, 1, &unboxedDescriptorPool, poolIds.size(), layouts.data(),
                poolIds.data(), whichPool.data(), pendingAlloc.data(), writeStartingIndices.data(),
                writeDescriptorSets.size(), writeDescriptorSets.data());
        }
        mSnapshotState = SnapshotState::Normal;
    }

    void lock() { mLock.lock(); }

    void unlock() { mLock.unlock(); }

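    // A sketch of the handle-buffer format as parsed below: a 32-bit byte count
    // followed by that many bytes of 64-bit handles, which are presumably
    // consumed in order as objects are recreated during snapshot load.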
setCreatedHandlesForSnapshotLoad(const unsigned char * buffer)854     size_t setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
855         size_t consumed = 0;
856 
857         if (!buffer) return consumed;
858 
859         uint32_t bufferSize = *(uint32_t*)buffer;
860 
861         consumed += 4;
862 
863         uint32_t handleCount = bufferSize / 8;
864         VKDGS_LOG("incoming handle count: %u", handleCount);
865 
866         uint64_t* handles = (uint64_t*)(buffer + 4);
867 
868         mCreatedHandlesForSnapshotLoad.clear();
869         mCreatedHandlesForSnapshotLoadIndex = 0;
870 
871         for (uint32_t i = 0; i < handleCount; ++i) {
872             VKDGS_LOG("handle to load: 0x%llx", (unsigned long long)(uintptr_t)handles[i]);
873             mCreatedHandlesForSnapshotLoad.push_back(handles[i]);
874             consumed += 8;
875         }
876 
877         return consumed;
878     }
879 
clearCreatedHandlesForSnapshotLoad()880     void clearCreatedHandlesForSnapshotLoad() {
881         mCreatedHandlesForSnapshotLoad.clear();
882         mCreatedHandlesForSnapshotLoadIndex = 0;
883     }
884 
on_vkEnumerateInstanceVersion(android::base::BumpPool * pool,uint32_t * pApiVersion)885     VkResult on_vkEnumerateInstanceVersion(android::base::BumpPool* pool, uint32_t* pApiVersion) {
886         if (m_vk->vkEnumerateInstanceVersion) {
887             VkResult res = m_vk->vkEnumerateInstanceVersion(pApiVersion);
888 
889             if (*pApiVersion > kMaxSafeVersion) {
890                 *pApiVersion = kMaxSafeVersion;
891             }
892 
893             return res;
894         }
895         *pApiVersion = kMinVersion;
896         return VK_SUCCESS;
897     }
898 
on_vkCreateInstance(android::base::BumpPool * pool,const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)899     VkResult on_vkCreateInstance(android::base::BumpPool* pool,
900                                  const VkInstanceCreateInfo* pCreateInfo,
901                                  const VkAllocationCallbacks* pAllocator, VkInstance* pInstance) {
902         std::vector<const char*> finalExts = filteredInstanceExtensionNames(
903             pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);
904 
905         // Create higher version instance whenever it is possible.
906         uint32_t apiVersion = VK_MAKE_VERSION(1, 0, 0);
907         if (pCreateInfo->pApplicationInfo) {
908             apiVersion = pCreateInfo->pApplicationInfo->apiVersion;
909         }
910         if (m_vk->vkEnumerateInstanceVersion) {
911             uint32_t instanceVersion;
912             VkResult result = m_vk->vkEnumerateInstanceVersion(&instanceVersion);
913             if (result == VK_SUCCESS && instanceVersion >= VK_MAKE_VERSION(1, 1, 0)) {
914                 apiVersion = instanceVersion;
915             }
916         }
917 
918         VkInstanceCreateInfo createInfoFiltered;
919         VkApplicationInfo appInfo = {};
920         deepcopy_VkInstanceCreateInfo(pool, VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO, pCreateInfo,
921                                       &createInfoFiltered);
922 
923         createInfoFiltered.enabledExtensionCount = static_cast<uint32_t>(finalExts.size());
924         createInfoFiltered.ppEnabledExtensionNames = finalExts.data();
925         if (createInfoFiltered.pApplicationInfo != nullptr) {
926             const_cast<VkApplicationInfo*>(createInfoFiltered.pApplicationInfo)->apiVersion =
927                 apiVersion;
928             appInfo = *createInfoFiltered.pApplicationInfo;
929         }
930 
931         // remove VkDebugReportCallbackCreateInfoEXT and
932         // VkDebugUtilsMessengerCreateInfoEXT from the chain.
933         auto* curr = reinterpret_cast<vk_struct_common*>(&createInfoFiltered);
934         while (curr != nullptr) {
935             if (curr->pNext != nullptr &&
936                 (curr->pNext->sType == VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT ||
937                  curr->pNext->sType == VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT)) {
938                 curr->pNext = curr->pNext->pNext;
939             }
940             curr = curr->pNext;
941         }
942 
943 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
944         if (m_emu->instanceSupportsMoltenVK) {
945             createInfoFiltered.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
946         }
947 #endif
948 
949         // bug: 155795731
950         bool swiftshader =
951             (android::base::getEnvironmentVariable("ANDROID_EMU_VK_ICD").compare("swiftshader") ==
952              0);
953         std::unique_ptr<std::lock_guard<std::recursive_mutex>> lock = nullptr;
954 
955         if (swiftshader) {
956             if (mLogging) {
957                 fprintf(stderr, "%s: acquire lock\n", __func__);
958             }
959             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
960         }
961 
962         VkResult res = m_vk->vkCreateInstance(&createInfoFiltered, pAllocator, pInstance);
963 
964         if (res != VK_SUCCESS) {
965             WARN("Failed to create Vulkan instance: %s.", string_VkResult(res));
966             return res;
967         }
968 
969         if (!swiftshader) {
970             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
971         }
972 
973         // TODO: bug 129484301
974         get_emugl_vm_operations().setSkipSnapshotSave(!m_emu->features.VulkanSnapshots.enabled);
975 
976         InstanceInfo info;
977         info.apiVersion = apiVersion;
978         if (pCreateInfo->pApplicationInfo) {
979             if (pCreateInfo->pApplicationInfo->pApplicationName) {
980                 info.applicationName = pCreateInfo->pApplicationInfo->pApplicationName;
981             }
982             if (pCreateInfo->pApplicationInfo->pEngineName) {
983                 info.engineName = pCreateInfo->pApplicationInfo->pEngineName;
984             }
985         }
986         for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
987             info.enabledExtensionNames.push_back(createInfoFiltered.ppEnabledExtensionNames[i]);
988         }
989 
990         INFO("Created VkInstance:%p for application:%s engine:%s.", *pInstance,
991              info.applicationName.c_str(), info.engineName.c_str());
992 
993         // Box it up
994         VkInstance boxed = new_boxed_VkInstance(*pInstance, nullptr, true /* own dispatch */);
995         init_vulkan_dispatch_from_instance(m_vk, *pInstance, dispatch_VkInstance(boxed));
996         info.boxed = boxed;
997 
998 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
999         if (m_emu->instanceSupportsMoltenVK) {
1000             if (!m_vk->vkSetMTLTextureMVK) {
1001                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find vkSetMTLTextureMVK";
1002             }
1003         }
1004 #endif
1005 
1006         std::string_view engineName = appInfo.pEngineName ? appInfo.pEngineName : "";
1007         info.isAngle = (engineName == "ANGLE");
1008 
1009         mInstanceInfo[*pInstance] = info;
1010 
1011         *pInstance = (VkInstance)info.boxed;
1012 
1013         auto fb = FrameBuffer::getFB();
1014         if (!fb) return res;
1015 
1016         if (vkCleanupEnabled()) {
1017             fb->registerProcessCleanupCallback(unbox_VkInstance(boxed), [this, boxed] {
1018                 vkDestroyInstanceImpl(unbox_VkInstance(boxed), nullptr);
1019             });
1020         }
1021 
1022         return res;
1023     }
1024 
vkDestroyInstanceImpl(VkInstance instance,const VkAllocationCallbacks * pAllocator)1025     void vkDestroyInstanceImpl(VkInstance instance, const VkAllocationCallbacks* pAllocator) {
1026         // Do delayed removes out of the lock, but get the list of devices to destroy inside the
1027         // lock.
1028         {
1029             std::lock_guard<std::recursive_mutex> lock(mLock);
1030             std::vector<VkDevice> devicesToDestroy;
1031 
1032             for (auto it : mDeviceToPhysicalDevice) {
1033                 auto* otherInstance = android::base::find(mPhysicalDeviceToInstance, it.second);
1034                 if (!otherInstance) continue;
1035                 if (instance == *otherInstance) {
1036                     devicesToDestroy.push_back(it.first);
1037                 }
1038             }
1039 
1040             for (auto device : devicesToDestroy) {
1041                 sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
1042             }
1043         }
1044 
1045         std::lock_guard<std::recursive_mutex> lock(mLock);
1046 
1047         teardownInstanceLocked(instance);
1048 
1049         if (mRenderDocWithMultipleVkInstances) {
1050             mRenderDocWithMultipleVkInstances->removeVkInstance(instance);
1051         }
1052         m_vk->vkDestroyInstance(instance, pAllocator);
1053 
1054         auto it = mPhysicalDeviceToInstance.begin();
1055 
1056         while (it != mPhysicalDeviceToInstance.end()) {
1057             if (it->second == instance) {
1058                 it = mPhysicalDeviceToInstance.erase(it);
1059             } else {
1060                 ++it;
1061             }
1062         }
1063 
1064         auto* instInfo = android::base::find(mInstanceInfo, instance);
1065         delete_VkInstance(instInfo->boxed);
1066         mInstanceInfo.erase(instance);
1067     }
1068 
on_vkDestroyInstance(android::base::BumpPool * pool,VkInstance boxed_instance,const VkAllocationCallbacks * pAllocator)1069     void on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance boxed_instance,
1070                               const VkAllocationCallbacks* pAllocator) {
1071         auto instance = unbox_VkInstance(boxed_instance);
1072 
1073         vkDestroyInstanceImpl(instance, pAllocator);
1074 
1075         auto fb = FrameBuffer::getFB();
1076         if (!fb) return;
1077 
1078         fb->unregisterProcessCleanupCallback(instance);
1079     }
1080 
on_vkEnumeratePhysicalDevices(android::base::BumpPool * pool,VkInstance boxed_instance,uint32_t * physicalDeviceCount,VkPhysicalDevice * physicalDevices)1081     VkResult on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool, VkInstance boxed_instance,
1082                                            uint32_t* physicalDeviceCount,
1083                                            VkPhysicalDevice* physicalDevices) {
1084         auto instance = unbox_VkInstance(boxed_instance);
1085         auto vk = dispatch_VkInstance(boxed_instance);
1086 
1087         uint32_t physicalDevicesSize = 0;
1088         if (physicalDeviceCount) {
1089             physicalDevicesSize = *physicalDeviceCount;
1090         }
1091 
1092         uint32_t actualPhysicalDeviceCount;
1093         auto res = vk->vkEnumeratePhysicalDevices(instance, &actualPhysicalDeviceCount, nullptr);
1094         if (res != VK_SUCCESS) {
1095             return res;
1096         }
1097         std::vector<VkPhysicalDevice> validPhysicalDevices(actualPhysicalDeviceCount);
1098         res = vk->vkEnumeratePhysicalDevices(instance, &actualPhysicalDeviceCount,
1099                                              validPhysicalDevices.data());
1100         if (res != VK_SUCCESS) return res;
1101 
1102         std::lock_guard<std::recursive_mutex> lock(mLock);
1103 
1104         if (m_emu->instanceSupportsExternalMemoryCapabilities) {
1105             PFN_vkGetPhysicalDeviceProperties2KHR getPhysdevProps2Func =
1106                 vk_util::getVkInstanceProcAddrWithFallback<
1107                     vk_util::vk_fn_info::GetPhysicalDeviceProperties2>(
1108                     {
1109                         vk->vkGetInstanceProcAddr,
1110                         m_vk->vkGetInstanceProcAddr,
1111                     },
1112                     instance);
1113 
1114             if (getPhysdevProps2Func) {
1115                 validPhysicalDevices.erase(
1116                     std::remove_if(validPhysicalDevices.begin(), validPhysicalDevices.end(),
1117                                    [getPhysdevProps2Func, this](VkPhysicalDevice physicalDevice) {
1118                                        // We can get the device UUID.
1119                                        VkPhysicalDeviceIDPropertiesKHR idProps = {
1120                                            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
1121                                            nullptr,
1122                                        };
1123                                        VkPhysicalDeviceProperties2KHR propsWithId = {
1124                                            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
1125                                            &idProps,
1126                                        };
1127                                        getPhysdevProps2Func(physicalDevice, &propsWithId);
1128 
1129                                        // Remove those devices whose UUIDs don't match the one
1130                                        // in VkCommonOperations.
1131                                        return memcmp(m_emu->deviceInfo.idProps.deviceUUID,
1132                                                      idProps.deviceUUID, VK_UUID_SIZE) != 0;
1133                                    }),
1134                     validPhysicalDevices.end());
1135             } else {
1136                 fprintf(stderr,
1137                         "%s: warning: failed to "
1138                         "vkGetPhysicalDeviceProperties2KHR\n",
1139                         __func__);
1140             }
1141         } else {
1142             // If we don't support ID properties then just advertise only the
1143             // first physical device.
1144             fprintf(stderr,
1145                     "%s: device id properties not supported, using first "
1146                     "physical device\n",
1147                     __func__);
1148         }
1149         if (!validPhysicalDevices.empty()) {
1150             validPhysicalDevices.erase(std::next(validPhysicalDevices.begin()),
1151                                        validPhysicalDevices.end());
1152         }
1153 
1154         if (physicalDeviceCount) {
1155             *physicalDeviceCount = validPhysicalDevices.size();
1156         }
1157 
1158         if (physicalDeviceCount && physicalDevices) {
1159             // Box them up
1160             for (uint32_t i = 0; i < std::min(*physicalDeviceCount, physicalDevicesSize); ++i) {
1161                 mPhysicalDeviceToInstance[validPhysicalDevices[i]] = instance;
1162 
1163                 auto& physdevInfo = mPhysdevInfo[validPhysicalDevices[i]];
1164                 physdevInfo.instance = instance;
1165                 physdevInfo.boxed = new_boxed_VkPhysicalDevice(validPhysicalDevices[i], vk,
1166                                                                false /* does not own dispatch */);
1167 
1168                 vk->vkGetPhysicalDeviceProperties(validPhysicalDevices[i], &physdevInfo.props);
1169 
1170                 if (physdevInfo.props.apiVersion > kMaxSafeVersion) {
1171                     physdevInfo.props.apiVersion = kMaxSafeVersion;
1172                 }
1173 
1174                 VkPhysicalDeviceMemoryProperties hostMemoryProperties;
1175                 vk->vkGetPhysicalDeviceMemoryProperties(validPhysicalDevices[i],
1176                                                         &hostMemoryProperties);
1177 
1178                 physdevInfo.memoryPropertiesHelper =
1179                     std::make_unique<EmulatedPhysicalDeviceMemoryProperties>(
1180                         hostMemoryProperties,
1181                         m_emu->representativeColorBufferMemoryTypeInfo->hostMemoryTypeIndex,
1182                         getFeatures());
1183 
1184                 uint32_t queueFamilyPropCount = 0;
1185 
1186                 vk->vkGetPhysicalDeviceQueueFamilyProperties(validPhysicalDevices[i],
1187                                                              &queueFamilyPropCount, nullptr);
1188 
1189                 physdevInfo.queueFamilyProperties.resize((size_t)queueFamilyPropCount);
1190 
1191                 vk->vkGetPhysicalDeviceQueueFamilyProperties(
1192                     validPhysicalDevices[i], &queueFamilyPropCount,
1193                     physdevInfo.queueFamilyProperties.data());
1194 
1195                 physicalDevices[i] = (VkPhysicalDevice)physdevInfo.boxed;
1196             }
1197             if (physicalDevicesSize < *physicalDeviceCount) {
1198                 res = VK_INCOMPLETE;
1199             }
1200         }
1201 
1202         return res;
1203     }
1204 
1205     void on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
1206                                         VkPhysicalDevice boxed_physicalDevice,
1207                                         VkPhysicalDeviceFeatures* pFeatures) {
1208         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1209         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1210 
1211         vk->vkGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
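        // Advertise ETC2/ASTC support even when the host lacks it, since these formats
        // can be emulated by decompressing on the host.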
1212         pFeatures->textureCompressionETC2 |= enableEmulatedEtc2(physicalDevice, vk);
1213         pFeatures->textureCompressionASTC_LDR |= enableEmulatedAstc(physicalDevice, vk);
1214     }
1215 
1216     void on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
1217                                          VkPhysicalDevice boxed_physicalDevice,
1218                                          VkPhysicalDeviceFeatures2* pFeatures) {
1219         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1220         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1221 
1222         std::lock_guard<std::recursive_mutex> lock(mLock);
1223 
1224         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1225         if (!physdevInfo) return;
1226 
1227         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1228         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1229         if (!instanceInfo) return;
1230 
1231         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1232             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1233             vk->vkGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
1234         } else if (hasInstanceExtension(instance,
1235                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1236             vk->vkGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures);
1237         } else {
1238             // No instance extension available; emulate the query.
1239             if (pFeatures->pNext) {
1240                 fprintf(stderr,
1241                         "%s: Warning: Trying to use extension struct in "
1242                         "VkPhysicalDeviceFeatures2 without having enabled "
1243                         "the extension!\n",
1244                         __func__);
1245             }
1246             *pFeatures = {
1247                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
1248                 0,
1249             };
1250             vk->vkGetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);
1251         }
1252 
1253         pFeatures->features.textureCompressionETC2 |= enableEmulatedEtc2(physicalDevice, vk);
1254         pFeatures->features.textureCompressionASTC_LDR |= enableEmulatedAstc(physicalDevice, vk);
1255         VkPhysicalDeviceSamplerYcbcrConversionFeatures* ycbcrFeatures =
1256             vk_find_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(pFeatures);
1257         if (ycbcrFeatures != nullptr) {
1258             ycbcrFeatures->samplerYcbcrConversion |= m_emu->enableYcbcrEmulation;
1259         }
1260         VkPhysicalDeviceProtectedMemoryFeatures* protectedMemoryFeatures =
1261             vk_find_struct<VkPhysicalDeviceProtectedMemoryFeatures>(pFeatures);
1262         if (protectedMemoryFeatures != nullptr) {
1263             // Protected memory is not supported on emulators. Override feature
1264             // information to mark as unsupported (see b/329845987).
1265             protectedMemoryFeatures->protectedMemory = VK_FALSE;
1266         }
1267     }
1268 
1269     VkResult on_vkGetPhysicalDeviceImageFormatProperties(
1270         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice, VkFormat format,
1271         VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
1272         VkImageFormatProperties* pImageFormatProperties) {
1273         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1274         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1275         const bool emulatedTexture = isEmulatedCompressedTexture(format, physicalDevice, vk);
1276         if (emulatedTexture) {
1277             if (!supportEmulatedCompressedImageFormatProperty(format, type, tiling, usage, flags)) {
1278                 memset(pImageFormatProperties, 0, sizeof(VkImageFormatProperties));
1279                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1280             }
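            // For emulated compressed textures, query against the format that will actually
            // back the image (the compressed-mipmaps format), with the mutable-format and
            // storage adjustments that emulated images are created with.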
1281             flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT;
1282             flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1283             usage |= VK_IMAGE_USAGE_STORAGE_BIT;
1284             format = CompressedImageInfo::getCompressedMipmapsFormat(format);
1285         }
1286 
1287         VkResult res = vk->vkGetPhysicalDeviceImageFormatProperties(
1288             physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
1289         if (res != VK_SUCCESS) {
1290             return res;
1291         }
1292         if (emulatedTexture) {
1293             maskImageFormatPropertiesForEmulatedTextures(pImageFormatProperties);
1294         }
1295         return res;
1296     }
1297 
1298     VkResult on_vkGetPhysicalDeviceImageFormatProperties2(
1299         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1300         const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
1301         VkImageFormatProperties2* pImageFormatProperties) {
1302         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1303         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1304         VkPhysicalDeviceImageFormatInfo2 imageFormatInfo;
1305         VkFormat format = pImageFormatInfo->format;
1306         const bool emulatedTexture = isEmulatedCompressedTexture(format, physicalDevice, vk);
1307         if (emulatedTexture) {
1308             if (!supportEmulatedCompressedImageFormatProperty(
1309                     pImageFormatInfo->format, pImageFormatInfo->type, pImageFormatInfo->tiling,
1310                     pImageFormatInfo->usage, pImageFormatInfo->flags)) {
1311                 memset(&pImageFormatProperties->imageFormatProperties, 0,
1312                        sizeof(VkImageFormatProperties));
1313                 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1314             }
1315             imageFormatInfo = *pImageFormatInfo;
1316             pImageFormatInfo = &imageFormatInfo;
1317             imageFormatInfo.flags &= ~VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT;
1318             imageFormatInfo.flags |= VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1319             imageFormatInfo.usage |= VK_IMAGE_USAGE_STORAGE_BIT;
1320             imageFormatInfo.format = CompressedImageInfo::getCompressedMipmapsFormat(format);
1321         }
1322         std::lock_guard<std::recursive_mutex> lock(mLock);
1323 
1324         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1325         if (!physdevInfo) {
1326             return VK_ERROR_OUT_OF_HOST_MEMORY;
1327         }
1328 
1329         VkResult res = VK_ERROR_INITIALIZATION_FAILED;
1330 
1331         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1332         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1333         if (!instanceInfo) {
1334             return res;
1335         }
1336 
1337         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1338             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1339             res = vk->vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo,
1340                                                                 pImageFormatProperties);
1341         } else if (hasInstanceExtension(instance,
1342                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1343             res = vk->vkGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo,
1344                                                                    pImageFormatProperties);
1345         } else {
1346             // No instance extension available; emulate the query.
1347             if (pImageFormatProperties->pNext) {
1348                 fprintf(stderr,
1349                         "%s: Warning: Trying to use extension struct in "
1350                         "VkImageFormatProperties2 without having enabled "
1351                         "the extension!\n",
1352                         __func__);
1353             }
1354             *pImageFormatProperties = {
1355                 VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
1356                 0,
1357             };
1358             res = vk->vkGetPhysicalDeviceImageFormatProperties(
1359                 physicalDevice, pImageFormatInfo->format, pImageFormatInfo->type,
1360                 pImageFormatInfo->tiling, pImageFormatInfo->usage, pImageFormatInfo->flags,
1361                 &pImageFormatProperties->imageFormatProperties);
1362         }
1363         if (res != VK_SUCCESS) {
1364             return res;
1365         }
1366 
1367         const VkPhysicalDeviceExternalImageFormatInfo* extImageFormatInfo =
1368             vk_find_struct<VkPhysicalDeviceExternalImageFormatInfo>(pImageFormatInfo);
1369         VkExternalImageFormatProperties* extImageFormatProps =
1370             vk_find_struct<VkExternalImageFormatProperties>(pImageFormatProperties);
1371 
1372         // Only allow dedicated allocations for external images.
1373         if (extImageFormatInfo && extImageFormatProps) {
1374             extImageFormatProps->externalMemoryProperties.externalMemoryFeatures |=
1375                 VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT;
1376         }
1377 
1378         if (emulatedTexture) {
1379             maskImageFormatPropertiesForEmulatedTextures(
1380                 &pImageFormatProperties->imageFormatProperties);
1381         }
1382 
1383         return res;
1384     }
1385 
1386     void on_vkGetPhysicalDeviceFormatProperties(android::base::BumpPool* pool,
1387                                                 VkPhysicalDevice boxed_physicalDevice,
1388                                                 VkFormat format,
1389                                                 VkFormatProperties* pFormatProperties) {
1390         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1391         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1392         getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
1393             [vk](VkPhysicalDevice physicalDevice, VkFormat format,
1394                  VkFormatProperties* pFormatProperties) {
1395                 vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
1396             },
1397             vk, physicalDevice, format, pFormatProperties);
1398     }
1399 
1400     void on_vkGetPhysicalDeviceFormatProperties2(android::base::BumpPool* pool,
1401                                                  VkPhysicalDevice boxed_physicalDevice,
1402                                                  VkFormat format,
1403                                                  VkFormatProperties2* pFormatProperties) {
1404         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1405         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1406 
1407         std::lock_guard<std::recursive_mutex> lock(mLock);
1408 
1409         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1410         if (!physdevInfo) return;
1411 
1412         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1413         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1414         if (!instanceInfo) return;
1415 
1416         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1417             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1418             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
1419                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
1420                      VkFormatProperties2* pFormatProperties) {
1421                     vk->vkGetPhysicalDeviceFormatProperties2(physicalDevice, format,
1422                                                              pFormatProperties);
1423                 },
1424                 vk, physicalDevice, format, pFormatProperties);
1425         } else if (hasInstanceExtension(instance,
1426                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1427             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties2>(
1428                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
1429                      VkFormatProperties2* pFormatProperties) {
1430                     vk->vkGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format,
1431                                                                 pFormatProperties);
1432                 },
1433                 vk, physicalDevice, format, pFormatProperties);
1434         } else {
1435             // No instance extension available; emulate the query.
1436             if (pFormatProperties->pNext) {
1437                 fprintf(stderr,
1438                         "%s: Warning: Trying to use extension struct in "
1439                         "vkGetPhysicalDeviceFormatProperties2 without having "
1440                         "enabled the extension!\n",
1441                         __func__);
1442             }
1443             pFormatProperties->sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2;
1444             getPhysicalDeviceFormatPropertiesCore<VkFormatProperties>(
1445                 [vk](VkPhysicalDevice physicalDevice, VkFormat format,
1446                      VkFormatProperties* pFormatProperties) {
1447                     vk->vkGetPhysicalDeviceFormatProperties(physicalDevice, format,
1448                                                             pFormatProperties);
1449                 },
1450                 vk, physicalDevice, format, &pFormatProperties->formatProperties);
1451         }
1452     }
1453 
1454     void on_vkGetPhysicalDeviceProperties(android::base::BumpPool* pool,
1455                                           VkPhysicalDevice boxed_physicalDevice,
1456                                           VkPhysicalDeviceProperties* pProperties) {
1457         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1458         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1459 
1460         vk->vkGetPhysicalDeviceProperties(physicalDevice, pProperties);
1461 
1462         if (pProperties->apiVersion > kMaxSafeVersion) {
1463             pProperties->apiVersion = kMaxSafeVersion;
1464         }
1465     }
1466 
1467     void on_vkGetPhysicalDeviceProperties2(android::base::BumpPool* pool,
1468                                            VkPhysicalDevice boxed_physicalDevice,
1469                                            VkPhysicalDeviceProperties2* pProperties) {
1470         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1471         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1472 
1473         std::lock_guard<std::recursive_mutex> lock(mLock);
1474 
1475         auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
1476         if (!physdevInfo) return;
1477 
1478         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1479         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1480         if (!instanceInfo) return;
1481 
1482         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1483             physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1484             vk->vkGetPhysicalDeviceProperties2(physicalDevice, pProperties);
1485         } else if (hasInstanceExtension(instance,
1486                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1487             vk->vkGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties);
1488         } else {
1489             // No instance extension available; emulate the query.
1490             if (pProperties->pNext) {
1491                 fprintf(stderr,
1492                         "%s: Warning: Trying to use extension struct in "
1493                         "VkPhysicalDeviceProperties2 without having enabled "
1494                         "the extension!\n",
1495                         __func__);
1496             }
1497             *pProperties = {
1498                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
1499                 0,
1500             };
1501             vk->vkGetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);
1502         }
1503 
1504         if (pProperties->properties.apiVersion > kMaxSafeVersion) {
1505             pProperties->properties.apiVersion = kMaxSafeVersion;
1506         }
1507     }
1508 
1509     void on_vkGetPhysicalDeviceMemoryProperties(
1510         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1511         VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
1512         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1513         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1514 
1515         std::lock_guard<std::recursive_mutex> lock(mLock);
1516 
1517         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, physicalDevice);
1518         if (!physicalDeviceInfo) {
1519             ERR("Failed to find physical device info.");
1520             return;
1521         }
1522 
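        // Report the emulated, guest-visible memory properties rather than the raw host
        // properties; the helper presents a guest-facing view of the host memory types.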
1523         auto& physicalDeviceMemoryHelper = physicalDeviceInfo->memoryPropertiesHelper;
1524         *pMemoryProperties = physicalDeviceMemoryHelper->getGuestMemoryProperties();
1525     }
1526 
1527     void on_vkGetPhysicalDeviceMemoryProperties2(
1528         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1529         VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
1530         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1531         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1532 
1533         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, physicalDevice);
1534         if (!physicalDeviceInfo) return;
1535 
1536         auto instance = mPhysicalDeviceToInstance[physicalDevice];
1537         auto* instanceInfo = android::base::find(mInstanceInfo, instance);
1538         if (!instanceInfo) return;
1539 
1540         if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
1541             physicalDeviceInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
1542             vk->vkGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
1543         } else if (hasInstanceExtension(instance,
1544                                         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
1545             vk->vkGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties);
1546         } else {
1547             // No instance extension available; emulate the query.
1548             if (pMemoryProperties->pNext) {
1549                 fprintf(stderr,
1550                         "%s: Warning: Trying to use extension struct in "
1551                         "VkPhysicalDeviceMemoryProperties2 without having enabled "
1552                         "the extension!\n",
1553                         __func__);
1554             }
1555             *pMemoryProperties = {
1556                 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
1557                 0,
1558             };
1559         }
1560 
1561         auto& physicalDeviceMemoryHelper = physicalDeviceInfo->memoryPropertiesHelper;
1562         pMemoryProperties->memoryProperties =
1563             physicalDeviceMemoryHelper->getGuestMemoryProperties();
1564     }
1565 
1566     VkResult on_vkEnumerateDeviceExtensionProperties(android::base::BumpPool* pool,
1567                                                      VkPhysicalDevice boxed_physicalDevice,
1568                                                      const char* pLayerName,
1569                                                      uint32_t* pPropertyCount,
1570                                                      VkExtensionProperties* pProperties) {
1571         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1572         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1573 
1574         bool shouldPassthrough = !m_emu->enableYcbcrEmulation;
1575 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
1576         shouldPassthrough = shouldPassthrough && !m_emu->instanceSupportsMoltenVK;
1577 #endif
1578         if (shouldPassthrough) {
1579             return vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName,
1580                                                             pPropertyCount, pProperties);
1581         }
1582 
1583         // If MoltenVK is supported on the host, ensure that the returned properties
1584         // include the VK_MVK_moltenvk extension.
1585         std::vector<VkExtensionProperties> properties;
1586         VkResult result =
1587             enumerateDeviceExtensionProperties(vk, physicalDevice, pLayerName, properties);
1588         if (result != VK_SUCCESS) {
1589             return result;
1590         }
1591 
1592 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
1593         if (m_emu->instanceSupportsMoltenVK &&
1594             !hasDeviceExtension(properties, VK_MVK_MOLTENVK_EXTENSION_NAME)) {
1595             VkExtensionProperties mvk_props;
1596             strncpy(mvk_props.extensionName, VK_MVK_MOLTENVK_EXTENSION_NAME,
1597                     sizeof(mvk_props.extensionName));
1598             mvk_props.specVersion = VK_MVK_MOLTENVK_SPEC_VERSION;
1599             properties.push_back(mvk_props);
1600         }
1601 #endif
1602 
1603         if (m_emu->enableYcbcrEmulation &&
1604             !hasDeviceExtension(properties, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
1605             VkExtensionProperties ycbcr_props;
1606             strncpy(ycbcr_props.extensionName, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME,
1607                     sizeof(ycbcr_props.extensionName));
1608             ycbcr_props.specVersion = VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION;
1609             properties.push_back(ycbcr_props);
1610         }
1611         if (pProperties == nullptr) {
1612             *pPropertyCount = properties.size();
1613         } else {
1614             // return number of structures actually written to pProperties.
1615             *pPropertyCount = std::min((uint32_t)properties.size(), *pPropertyCount);
1616             memcpy(pProperties, properties.data(), *pPropertyCount * sizeof(VkExtensionProperties));
1617         }
1618         return *pPropertyCount < properties.size() ? VK_INCOMPLETE : VK_SUCCESS;
1619     }
1620 
1621     VkResult on_vkCreateDevice(android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
1622                                const VkDeviceCreateInfo* pCreateInfo,
1623                                const VkAllocationCallbacks* pAllocator, VkDevice* pDevice) {
1624         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
1625         auto vk = dispatch_VkPhysicalDevice(boxed_physicalDevice);
1626 
1627         const std::vector<const char*> finalExts =
1628             filteredDeviceExtensionNames(vk, physicalDevice, pCreateInfo->enabledExtensionCount,
1629                                          pCreateInfo->ppEnabledExtensionNames);
1630 
1631         // Run the underlying API call, filtering extensions.
1632         VkDeviceCreateInfo createInfoFiltered = *pCreateInfo;
1633         // According to the spec, it seems that the application can use compressed texture formats
1634         // without enabling the feature when creating the VkDevice, as long as
1635         // vkGetPhysicalDeviceFormatProperties and vkGetPhysicalDeviceImageFormatProperties report
1636         // support: to query for additional properties, or if the feature is not enabled,
1637         // vkGetPhysicalDeviceFormatProperties and vkGetPhysicalDeviceImageFormatProperties can be
1638         // used to check for supported properties of individual formats as normal.
1639         bool emulateTextureEtc2 = needEmulatedEtc2(physicalDevice, vk);
1640         bool emulateTextureAstc = needEmulatedAstc(physicalDevice, vk);
1641         VkPhysicalDeviceFeatures featuresFiltered;
1642         std::vector<VkPhysicalDeviceFeatures*> featuresToFilter;
1643 
1644         if (pCreateInfo->pEnabledFeatures) {
1645             featuresFiltered = *pCreateInfo->pEnabledFeatures;
1646             createInfoFiltered.pEnabledFeatures = &featuresFiltered;
1647             featuresToFilter.emplace_back(&featuresFiltered);
1648         }
1649 
1650         if (VkPhysicalDeviceFeatures2* features2 =
1651                 vk_find_struct<VkPhysicalDeviceFeatures2>(&createInfoFiltered)) {
1652             featuresToFilter.emplace_back(&features2->features);
1653         }
1654 
1655         for (VkPhysicalDeviceFeatures* feature : featuresToFilter) {
1656             if (emulateTextureEtc2) {
1657                 feature->textureCompressionETC2 = VK_FALSE;
1658             }
1659             if (emulateTextureAstc) {
1660                 feature->textureCompressionASTC_LDR = VK_FALSE;
1661             }
1662         }
1663 
1664         if (auto* ycbcrFeatures = vk_find_struct<VkPhysicalDeviceSamplerYcbcrConversionFeatures>(
1665                 &createInfoFiltered)) {
1666             if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
1667                 ycbcrFeatures->samplerYcbcrConversion = VK_FALSE;
1668             }
1669         }
1670 
1671         if (auto* swapchainMaintenance1Features =
1672                 vk_find_struct<VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT>(
1673                     &createInfoFiltered)) {
1674             if (!supportsSwapchainMaintenance1(physicalDevice, vk)) {
1675                 swapchainMaintenance1Features->swapchainMaintenance1 = VK_FALSE;
1676             }
1677         }
1678 
1679 #ifdef __APPLE__
1680 #ifndef VK_ENABLE_BETA_EXTENSIONS
1681         // TODO: Update Vulkan headers, stringhelpers and compilation parameters to use
1682         // this directly from beta extensions and use regular chain append commands
1683         const VkStructureType VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR =
1684             (VkStructureType)1000163000;
1685 #endif
1686         // Enable all portability features supported on the device
1687         VkPhysicalDevicePortabilitySubsetFeaturesKHR supportedPortabilityFeatures = {
1688             VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR, nullptr};
1689         if (m_emu->instanceSupportsMoltenVK) {
1690             VkPhysicalDeviceFeatures2 features2 = {
1691                 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
1692                 .pNext = &supportedPortabilityFeatures,
1693             };
1694             vk->vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
1695 
1696             if (mVerbosePrints) {
1697                 fprintf(stderr,
1698                         "VERBOSE:%s: MoltenVK supportedPortabilityFeatures\n"
1699                         "constantAlphaColorBlendFactors = %d\n"
1700                         "events = %d\n"
1701                         "imageViewFormatReinterpretation = %d\n"
1702                         "imageViewFormatSwizzle = %d\n"
1703                         "imageView2DOn3DImage = %d\n"
1704                         "multisampleArrayImage = %d\n"
1705                         "mutableComparisonSamplers = %d\n"
1706                         "pointPolygons = %d\n"
1707                         "samplerMipLodBias = %d\n"
1708                         "separateStencilMaskRef = %d\n"
1709                         "shaderSampleRateInterpolationFunctions = %d\n"
1710                         "tessellationIsolines = %d\n"
1711                         "tessellationPointMode = %d\n"
1712                         "triangleFans = %d\n"
1713                         "vertexAttributeAccessBeyondStride = %d\n",
1714                         __func__, supportedPortabilityFeatures.constantAlphaColorBlendFactors,
1715                         supportedPortabilityFeatures.events,
1716                         supportedPortabilityFeatures.imageViewFormatReinterpretation,
1717                         supportedPortabilityFeatures.imageViewFormatSwizzle,
1718                         supportedPortabilityFeatures.imageView2DOn3DImage,
1719                         supportedPortabilityFeatures.multisampleArrayImage,
1720                         supportedPortabilityFeatures.mutableComparisonSamplers,
1721                         supportedPortabilityFeatures.pointPolygons,
1722                         supportedPortabilityFeatures.samplerMipLodBias,
1723                         supportedPortabilityFeatures.separateStencilMaskRef,
1724                         supportedPortabilityFeatures.shaderSampleRateInterpolationFunctions,
1725                         supportedPortabilityFeatures.tessellationIsolines,
1726                         supportedPortabilityFeatures.tessellationPointMode,
1727                         supportedPortabilityFeatures.triangleFans,
1728                         supportedPortabilityFeatures.vertexAttributeAccessBeyondStride);
1729             }
1730 
1731             // Insert into device create info chain
1732             supportedPortabilityFeatures.pNext = const_cast<void*>(createInfoFiltered.pNext);
1733             createInfoFiltered.pNext = &supportedPortabilityFeatures;
1734         }
1735 #endif
1736 
1737         // Filter device memory report as callbacks can not be passed between guest and host.
1738         vk_struct_chain_filter<VkDeviceDeviceMemoryReportCreateInfoEXT>(&createInfoFiltered);
1739 
1740         createInfoFiltered.enabledExtensionCount = (uint32_t)finalExts.size();
1741         createInfoFiltered.ppEnabledExtensionNames = finalExts.data();
1742 
1743         // bug: 155795731
1744         bool swiftshader =
1745             (android::base::getEnvironmentVariable("ANDROID_EMU_VK_ICD").compare("swiftshader") ==
1746              0);
1747 
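        // With SwiftShader, take the global lock before calling into the ICD (see the bug
        // referenced above); on other drivers it is taken only after vkCreateDevice returns,
        // presumably so that device creation is not serialized against other global state.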
1748         std::unique_ptr<std::lock_guard<std::recursive_mutex>> lock = nullptr;
1749 
1750         if (swiftshader) {
1751             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
1752         }
1753 
1754         VkResult result =
1755             vk->vkCreateDevice(physicalDevice, &createInfoFiltered, pAllocator, pDevice);
1756 
1757         if (result != VK_SUCCESS) return result;
1758 
1759         if (!swiftshader) {
1760             lock = std::make_unique<std::lock_guard<std::recursive_mutex>>(mLock);
1761         }
1762 
1763         mDeviceToPhysicalDevice[*pDevice] = physicalDevice;
1764 
1765         auto physicalDeviceInfoIt = mPhysdevInfo.find(physicalDevice);
1766         if (physicalDeviceInfoIt == mPhysdevInfo.end()) return VK_ERROR_INITIALIZATION_FAILED;
1767         auto& physicalDeviceInfo = physicalDeviceInfoIt->second;
1768 
1769         auto instanceInfoIt = mInstanceInfo.find(physicalDeviceInfo.instance);
1770         if (instanceInfoIt == mInstanceInfo.end()) return VK_ERROR_INITIALIZATION_FAILED;
1771         auto& instanceInfo = instanceInfoIt->second;
1772 
1773         // Fill out information about the logical device here.
1774         auto& deviceInfo = mDeviceInfo[*pDevice];
1775         deviceInfo.physicalDevice = physicalDevice;
1776         deviceInfo.emulateTextureEtc2 = emulateTextureEtc2;
1777         deviceInfo.emulateTextureAstc = emulateTextureAstc;
1778         deviceInfo.useAstcCpuDecompression =
1779             m_emu->astcLdrEmulationMode == AstcEmulationMode::Cpu &&
1780             AstcCpuDecompressor::get().available();
1781         deviceInfo.decompPipelines =
1782             std::make_unique<GpuDecompressionPipelineManager>(m_vk, *pDevice);
1783 
1784         INFO("Created VkDevice:%p for application:%s engine:%s ASTC emulation:%s CPU decoding:%s.",
1785              *pDevice, instanceInfo.applicationName.c_str(), instanceInfo.engineName.c_str(),
1786              deviceInfo.emulateTextureAstc ? "on" : "off",
1787              deviceInfo.useAstcCpuDecompression ? "on" : "off");
1788 
1789         for (uint32_t i = 0; i < createInfoFiltered.enabledExtensionCount; ++i) {
1790             deviceInfo.enabledExtensionNames.push_back(
1791                 createInfoFiltered.ppEnabledExtensionNames[i]);
1792         }
1793 
1794         // First, get the dispatch table.
1795         VkDevice boxed = new_boxed_VkDevice(*pDevice, nullptr, true /* own dispatch */);
1796 
1797         if (mLogging) {
1798             fprintf(stderr, "%s: init vulkan dispatch from device\n", __func__);
1799         }
1800 
1801         VulkanDispatch* dispatch = dispatch_VkDevice(boxed);
1802         init_vulkan_dispatch_from_device(vk, *pDevice, dispatch);
1803         if (m_emu->debugUtilsAvailableAndRequested) {
1804             deviceInfo.debugUtilsHelper = DebugUtilsHelper::withUtilsEnabled(*pDevice, dispatch);
1805         }
1806 
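        // Per-device helpers: a pool for recycling fences used for external fence handling,
        // and a tracker for outstanding device operations.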
1807         deviceInfo.externalFencePool =
1808             std::make_unique<ExternalFencePool<VulkanDispatch>>(dispatch, *pDevice);
1809 
1810         deviceInfo.deviceOpTracker.emplace(*pDevice, dispatch);
1811 
1812         if (mLogging) {
1813             fprintf(stderr, "%s: init vulkan dispatch from device (end)\n", __func__);
1814         }
1815 
1816         deviceInfo.boxed = boxed;
1817 
1818         // Next, get information about the queue families used by this device.
1819         std::unordered_map<uint32_t, uint32_t> queueFamilyIndexCounts;
1820         for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; ++i) {
1821             const auto& queueCreateInfo = pCreateInfo->pQueueCreateInfos[i];
1822             // Check only queues created with flags = 0 in VkDeviceQueueCreateInfo.
1823             auto flags = queueCreateInfo.flags;
1824             if (flags) continue;
1825             uint32_t queueFamilyIndex = queueCreateInfo.queueFamilyIndex;
1826             uint32_t queueCount = queueCreateInfo.queueCount;
1827             queueFamilyIndexCounts[queueFamilyIndex] = queueCount;
1828         }
1829 
1830         std::vector<uint64_t> extraHandles;
1831         for (auto it : queueFamilyIndexCounts) {
1832             auto index = it.first;
1833             auto count = it.second;
1834             auto& queues = deviceInfo.queues[index];
1835             for (uint32_t i = 0; i < count; ++i) {
1836                 VkQueue queueOut;
1837 
1838                 if (mLogging) {
1839                     fprintf(stderr, "%s: get device queue (begin)\n", __func__);
1840                 }
1841 
1842                 vk->vkGetDeviceQueue(*pDevice, index, i, &queueOut);
1843 
1844                 if (mLogging) {
1845                     fprintf(stderr, "%s: get device queue (end)\n", __func__);
1846                 }
1847                 queues.push_back(queueOut);
1848                 mQueueInfo[queueOut].device = *pDevice;
1849                 mQueueInfo[queueOut].queueFamilyIndex = index;
1850 
1851                 auto boxed = new_boxed_VkQueue(queueOut, dispatch_VkDevice(deviceInfo.boxed),
1852                                                false /* does not own dispatch */);
1853                 extraHandles.push_back((uint64_t)boxed);
1854                 mQueueInfo[queueOut].boxed = boxed;
1855                 mQueueInfo[queueOut].lock = new Lock;
1856             }
1857         }
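        // When snapshots are enabled, register the boxed queue handles created above so the
        // snapshot can reproduce the same handles for this vkCreateDevice call on load.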
1858         if (snapshotsEnabled()) {
1859             snapshot()->createExtraHandlesForNextApi(extraHandles.data(), extraHandles.size());
1860         }
1861 
1862         // Box the device.
1863         *pDevice = (VkDevice)deviceInfo.boxed;
1864 
1865         if (mLogging) {
1866             fprintf(stderr, "%s: (end)\n", __func__);
1867         }
1868 
1869         return VK_SUCCESS;
1870     }
1871 
1872     void on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice boxed_device,
1873                              uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue) {
1874         auto device = unbox_VkDevice(boxed_device);
1875 
1876         std::lock_guard<std::recursive_mutex> lock(mLock);
1877 
1878         *pQueue = VK_NULL_HANDLE;
1879 
1880         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1881         if (!deviceInfo) return;
1882 
1883         const auto& queues = deviceInfo->queues;
1884 
1885         const auto* queueList = android::base::find(queues, queueFamilyIndex);
1886         if (!queueList) return;
1887         if (queueIndex >= queueList->size()) return;
1888 
1889         VkQueue unboxedQueue = (*queueList)[queueIndex];
1890 
1891         auto* queueInfo = android::base::find(mQueueInfo, unboxedQueue);
1892         if (!queueInfo) return;
1893 
1894         *pQueue = (VkQueue)queueInfo->boxed;
1895     }
1896 
1897     void on_vkGetDeviceQueue2(android::base::BumpPool* pool, VkDevice boxed_device,
1898                               const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {
1899         // Protected memory is not supported on emulators. So we should
1900         // not return any queue if a client requests a protected device
1901         // queue. See b/328436383.
1902         if (pQueueInfo->flags & VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT) {
1903             *pQueue = VK_NULL_HANDLE;
1904             fprintf(stderr, "%s: Cannot get protected Vulkan device queue\n", __func__);
1905             return;
1906         }
1907         uint32_t queueFamilyIndex = pQueueInfo->queueFamilyIndex;
1908         uint32_t queueIndex = pQueueInfo->queueIndex;
1909         on_vkGetDeviceQueue(pool, boxed_device, queueFamilyIndex, queueIndex, pQueue);
1910     }
1911 
1912     void destroyDeviceLocked(VkDevice device, const VkAllocationCallbacks* pAllocator) {
1913         auto* deviceInfo = android::base::find(mDeviceInfo, device);
1914         if (!deviceInfo) return;
1915 
1916         deviceInfo->decompPipelines->clear();
1917 
1918         auto eraseIt = mQueueInfo.begin();
1919         for (; eraseIt != mQueueInfo.end();) {
1920             if (eraseIt->second.device == device) {
1921                 delete eraseIt->second.lock;
1922                 delete_VkQueue(eraseIt->second.boxed);
1923                 eraseIt = mQueueInfo.erase(eraseIt);
1924             } else {
1925                 ++eraseIt;
1926             }
1927         }
1928 
1929         VulkanDispatch* deviceDispatch = dispatch_VkDevice(deviceInfo->boxed);
1930 
1931         // Destroy pooled external fences
1932         auto deviceFences = deviceInfo->externalFencePool->popAll();
1933         for (auto fence : deviceFences) {
1934             deviceDispatch->vkDestroyFence(device, fence, pAllocator);
1935             mFenceInfo.erase(fence);
1936         }
1937 
1938         for (auto fence : findDeviceObjects(device, mFenceInfo)) {
1939             deviceDispatch->vkDestroyFence(device, fence, pAllocator);
1940             mFenceInfo.erase(fence);
1941         }
1942 
1943         deviceInfo->deviceOpTracker->OnDestroyDevice();
1944 
1945         // Run the underlying API call.
1946         m_vk->vkDestroyDevice(device, pAllocator);
1947 
1948         delete_VkDevice(deviceInfo->boxed);
1949     }
1950 
1951     void on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice boxed_device,
1952                             const VkAllocationCallbacks* pAllocator) {
1953         auto device = unbox_VkDevice(boxed_device);
1954 
1955         std::lock_guard<std::recursive_mutex> lock(mLock);
1956 
1957         sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
1958         destroyDeviceLocked(device, pAllocator);
1959 
1960         mDeviceInfo.erase(device);
1961         mDeviceToPhysicalDevice.erase(device);
1962     }
1963 
1964     VkResult on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice boxed_device,
1965                                const VkBufferCreateInfo* pCreateInfo,
1966                                const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer) {
1967         auto device = unbox_VkDevice(boxed_device);
1968         auto vk = dispatch_VkDevice(boxed_device);
1969         VkBufferCreateInfo localCreateInfo;
1970         if (snapshotsEnabled()) {
1971             localCreateInfo = *pCreateInfo;
1972             // Add transfer src bit for potential device local memories.
1973             //
1974             // There are 3 ways to populate buffer content:
1975             //   a) use host coherent memory and memory mapping;
1976             //   b) use transfer_dst and vkcmdcopy* (for device local memories);
1977             //   c) use storage and compute shaders.
1978             //
1979             // (a) is covered by the memory snapshot. (b) requires an extra vkCmdCopyBuffer
1980             // command at snapshot time, thus we add transfer_src for (b) so that the
1981             // buffer contents can be read back on snapshot save. (c) is still future work.
1982             if (localCreateInfo.usage & VK_BUFFER_USAGE_TRANSFER_DST_BIT) {
1983                 localCreateInfo.usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
1984             }
1985             pCreateInfo = &localCreateInfo;
1986         }
1987 
1988         VkResult result = vk->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
1989 
1990         if (result == VK_SUCCESS) {
1991             std::lock_guard<std::recursive_mutex> lock(mLock);
1992             auto& bufInfo = mBufferInfo[*pBuffer];
1993             bufInfo.device = device;
1994             bufInfo.usage = pCreateInfo->usage;
1995             bufInfo.size = pCreateInfo->size;
1996             *pBuffer = new_boxed_non_dispatchable_VkBuffer(*pBuffer);
1997         }
1998 
1999         return result;
2000     }
2001 
2002     void on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice boxed_device, VkBuffer buffer,
2003                             const VkAllocationCallbacks* pAllocator) {
2004         auto device = unbox_VkDevice(boxed_device);
2005         auto vk = dispatch_VkDevice(boxed_device);
2006 
2007         vk->vkDestroyBuffer(device, buffer, pAllocator);
2008 
2009         std::lock_guard<std::recursive_mutex> lock(mLock);
2010         mBufferInfo.erase(buffer);
2011     }
2012 
2013     void setBufferMemoryBindInfoLocked(VkDevice device, VkBuffer buffer, VkDeviceMemory memory,
2014                                        VkDeviceSize memoryOffset) {
2015         auto* bufferInfo = android::base::find(mBufferInfo, buffer);
2016         if (!bufferInfo) return;
2017         bufferInfo->memory = memory;
2018         bufferInfo->memoryOffset = memoryOffset;
2019 
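        // If this memory is associated with a gfxstream Buffer (an inference from the
        // boundBuffer bookkeeping), tag the VkBuffer with that handle as a debug label.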
2020         auto* memoryInfo = android::base::find(mMemoryInfo, memory);
2021         if (memoryInfo && memoryInfo->boundBuffer) {
2022             auto* deviceInfo = android::base::find(mDeviceInfo, device);
2023             if (deviceInfo) {
2024                 deviceInfo->debugUtilsHelper.addDebugLabel(buffer, "Buffer:%d",
2025                                                            *memoryInfo->boundBuffer);
2026             }
2027         }
2028     }
2029 
2030     VkResult on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice boxed_device,
2031                                    VkBuffer buffer, VkDeviceMemory memory,
2032                                    VkDeviceSize memoryOffset) {
2033         auto device = unbox_VkDevice(boxed_device);
2034         auto vk = dispatch_VkDevice(boxed_device);
2035 
2036         VALIDATE_REQUIRED_HANDLE(memory);
2037         VkResult result = vk->vkBindBufferMemory(device, buffer, memory, memoryOffset);
2038 
2039         if (result == VK_SUCCESS) {
2040             std::lock_guard<std::recursive_mutex> lock(mLock);
2041             setBufferMemoryBindInfoLocked(device, buffer, memory, memoryOffset);
2042         }
2043         return result;
2044     }
2045 
2046     VkResult on_vkBindBufferMemory2(android::base::BumpPool* pool, VkDevice boxed_device,
2047                                     uint32_t bindInfoCount,
2048                                     const VkBindBufferMemoryInfo* pBindInfos) {
2049         auto device = unbox_VkDevice(boxed_device);
2050         auto vk = dispatch_VkDevice(boxed_device);
2051 
2052         for (uint32_t i = 0; i < bindInfoCount; ++i) {
2053             VALIDATE_REQUIRED_HANDLE(pBindInfos[i].memory);
2054         }
2055         VkResult result = vk->vkBindBufferMemory2(device, bindInfoCount, pBindInfos);
2056 
2057         if (result == VK_SUCCESS) {
2058             std::lock_guard<std::recursive_mutex> lock(mLock);
2059             for (uint32_t i = 0; i < bindInfoCount; ++i) {
2060                 setBufferMemoryBindInfoLocked(device, pBindInfos[i].buffer, pBindInfos[i].memory,
2061                                               pBindInfos[i].memoryOffset);
2062             }
2063         }
2064 
2065         return result;
2066     }
2067 
2068     VkResult on_vkBindBufferMemory2KHR(android::base::BumpPool* pool, VkDevice boxed_device,
2069                                        uint32_t bindInfoCount,
2070                                        const VkBindBufferMemoryInfo* pBindInfos) {
2071         auto device = unbox_VkDevice(boxed_device);
2072         auto vk = dispatch_VkDevice(boxed_device);
2073 
2074         for (uint32_t i = 0; i < bindInfoCount; ++i) {
2075             VALIDATE_REQUIRED_HANDLE(pBindInfos[i].memory);
2076         }
2077         VkResult result = vk->vkBindBufferMemory2KHR(device, bindInfoCount, pBindInfos);
2078 
2079         if (result == VK_SUCCESS) {
2080             std::lock_guard<std::recursive_mutex> lock(mLock);
2081             for (uint32_t i = 0; i < bindInfoCount; ++i) {
2082                 setBufferMemoryBindInfoLocked(device, pBindInfos[i].buffer, pBindInfos[i].memory,
2083                                               pBindInfos[i].memoryOffset);
2084             }
2085         }
2086 
2087         return result;
2088     }
2089 
2090     VkResult on_vkCreateImage(android::base::BumpPool* pool, VkDevice boxed_device,
2091                               const VkImageCreateInfo* pCreateInfo,
2092                               const VkAllocationCallbacks* pAllocator, VkImage* pImage) {
2093         auto device = unbox_VkDevice(boxed_device);
2094         auto vk = dispatch_VkDevice(boxed_device);
2095 
2096         std::lock_guard<std::recursive_mutex> lock(mLock);
2097 
2098         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2099         if (!deviceInfo) {
2100             return VK_ERROR_OUT_OF_HOST_MEMORY;
2101         }
2102 
2103         if (deviceInfo->imageFormats.find(pCreateInfo->format) == deviceInfo->imageFormats.end()) {
2104             INFO("gfxstream_texture_format_manifest: %s", string_VkFormat(pCreateInfo->format));
2105             deviceInfo->imageFormats.insert(pCreateInfo->format);
2106         }
2107 
2108         const bool needDecompression = deviceInfo->needEmulatedDecompression(pCreateInfo->format);
2109         CompressedImageInfo cmpInfo =
2110             needDecompression
2111                 ? CompressedImageInfo(device, *pCreateInfo, deviceInfo->decompPipelines.get())
2112                 : CompressedImageInfo(device);
2113         VkImageCreateInfo decompInfo;
2114         if (needDecompression) {
2115             decompInfo = cmpInfo.getOutputCreateInfo(*pCreateInfo);
2116             pCreateInfo = &decompInfo;
2117         }
2118 
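        // Images created with a VkNativeBufferANDROID in the pNext chain are backed by an
        // Android native buffer and take a separate creation path below.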
2119         auto anbInfo = std::make_unique<AndroidNativeBufferInfo>();
2120         const VkNativeBufferANDROID* nativeBufferANDROID =
2121             vk_find_struct<VkNativeBufferANDROID>(pCreateInfo);
2122 
2123         VkResult createRes = VK_SUCCESS;
2124 
2125         if (nativeBufferANDROID) {
2126             auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
2127             if (!physicalDevice) {
2128                 return VK_ERROR_DEVICE_LOST;
2129             }
2130 
2131             auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
2132             if (!physicalDeviceInfo) {
2133                 return VK_ERROR_DEVICE_LOST;
2134             }
2135 
2136             const VkPhysicalDeviceMemoryProperties& memoryProperties =
2137                 physicalDeviceInfo->memoryPropertiesHelper->getHostMemoryProperties();
2138 
2139             createRes =
2140                 prepareAndroidNativeBufferImage(vk, device, *pool, pCreateInfo, nativeBufferANDROID,
2141                                                 pAllocator, &memoryProperties, anbInfo.get());
2142             if (createRes == VK_SUCCESS) {
2143                 *pImage = anbInfo->image;
2144             }
2145         } else {
2146             createRes = vk->vkCreateImage(device, pCreateInfo, pAllocator, pImage);
2147         }
2148 
2149         if (createRes != VK_SUCCESS) return createRes;
2150 
2151         if (needDecompression) {
2152             cmpInfo.setOutputImage(*pImage);
2153             cmpInfo.createCompressedMipmapImages(vk, *pCreateInfo);
2154 
2155             if (cmpInfo.isAstc()) {
2156                 if (deviceInfo->useAstcCpuDecompression) {
2157                     cmpInfo.initAstcCpuDecompression(m_vk, mDeviceInfo[device].physicalDevice);
2158                 }
2159             }
2160         }
2161 
2162         auto& imageInfo = mImageInfo[*pImage];
2163         imageInfo.device = device;
2164         imageInfo.cmpInfo = std::move(cmpInfo);
2165         imageInfo.imageCreateInfoShallow = vk_make_orphan_copy(*pCreateInfo);
2166         imageInfo.layout = pCreateInfo->initialLayout;
2167         if (nativeBufferANDROID) imageInfo.anbInfo = std::move(anbInfo);
2168 
2169         *pImage = new_boxed_non_dispatchable_VkImage(*pImage);
2170         return createRes;
2171     }
2172 
2173     void destroyImageLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkImage image,
2174                             const VkAllocationCallbacks* pAllocator) {
2175         auto* imageInfo = android::base::find(mImageInfo, image);
2176         if (!imageInfo) return;
2177 
2178         if (!imageInfo->anbInfo) {
2179             imageInfo->cmpInfo.destroy(deviceDispatch);
2180             if (image != imageInfo->cmpInfo.outputImage()) {
2181                 deviceDispatch->vkDestroyImage(device, image, pAllocator);
2182             }
2183         }
2184         mImageInfo.erase(image);
2185     }
2186 
2187     void on_vkDestroyImage(android::base::BumpPool* pool, VkDevice boxed_device, VkImage image,
2188                            const VkAllocationCallbacks* pAllocator) {
2189         auto device = unbox_VkDevice(boxed_device);
2190         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2191 
2192         std::lock_guard<std::recursive_mutex> lock(mLock);
2193         destroyImageLocked(device, deviceDispatch, image, pAllocator);
2194     }
2195 
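    // Handles a vkBindImageMemory() where the guest deferred the AHardwareBuffer backing:
    // the bind info carries a VkNativeBufferANDROID with a VK_NULL_HANDLE memory. The image
    // is recreated against that native buffer and the guest's existing boxed handle is
    // re-pointed at the replacement, so the guest can keep using the handle it already has.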
2196     VkResult performBindImageMemoryDeferredAhb(android::base::BumpPool* pool,
2197                                                VkDevice boxed_device,
2198                                                const VkBindImageMemoryInfo* bimi) {
2199         auto device = unbox_VkDevice(boxed_device);
2200         auto vk = dispatch_VkDevice(boxed_device);
2201 
2202         auto original_underlying_image = bimi->image;
2203         auto original_boxed_image = unboxed_to_boxed_non_dispatchable_VkImage(original_underlying_image);
2204 
2205         VkImageCreateInfo ici = {};
2206         {
2207             std::lock_guard<std::recursive_mutex> lock(mLock);
2208 
2209             auto* imageInfo = android::base::find(mImageInfo, original_underlying_image);
2210             if (!imageInfo) {
2211                 ERR("Image for deferred AHB bind does not exist.");
2212                 return VK_ERROR_OUT_OF_HOST_MEMORY;
2213             }
2214 
2215             ici = imageInfo->imageCreateInfoShallow;
2216         }
2217 
2218         ici.pNext = vk_find_struct<VkNativeBufferANDROID>(bimi);
2219         if (!ici.pNext) {
2220             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
2221                 << "Missing VkNativeBufferANDROID for deferred AHB bind.";
2222         }
2223 
2224         VkImage boxed_replacement_image = VK_NULL_HANDLE;
2225         VkResult result = on_vkCreateImage(pool, boxed_device, &ici, nullptr, &boxed_replacement_image);
2226         if (result != VK_SUCCESS) {
2227             ERR("Failed to create image for deferred AHB bind.");
2228             return VK_ERROR_OUT_OF_HOST_MEMORY;
2229         }
2230 
2231         on_vkDestroyImage(pool, boxed_device, original_underlying_image, nullptr);
2232 
2233         {
2234             std::lock_guard<std::recursive_mutex> lock(mLock);
2235 
2236             auto underlying_replacement_image = unbox_VkImage(boxed_replacement_image);
2237             delete_VkImage(boxed_replacement_image);
2238             set_boxed_non_dispatchable_VkImage(original_boxed_image, underlying_replacement_image);
2239         }
2240 
2241         return VK_SUCCESS;
2242     }
2243 
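    // Common bind path shared by vkBindImageMemory and vkBindImageMemory2. Deferred AHB binds
    // are routed to performBindImageMemoryDeferredAhb(); otherwise the real bind is performed
    // and, for images using emulated ETC2/ASTC decompression, the per-level compressed mipmap
    // images are bound as well.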
2244     VkResult performBindImageMemory(android::base::BumpPool* pool, VkDevice boxed_device,
2245                                     const VkBindImageMemoryInfo* bimi) {
2246         auto image = bimi->image;
2247         auto memory = bimi->memory;
2248         auto memoryOffset = bimi->memoryOffset;
2249 
2250         const auto* anb = vk_find_struct<VkNativeBufferANDROID>(bimi);
2251         if (memory == VK_NULL_HANDLE && anb != nullptr) {
2252             return performBindImageMemoryDeferredAhb(pool, boxed_device, bimi);
2253         }
2254 
2255         auto device = unbox_VkDevice(boxed_device);
2256         auto vk = dispatch_VkDevice(boxed_device);
2257 
2258         VALIDATE_REQUIRED_HANDLE(memory);
2259         VkResult result = vk->vkBindImageMemory(device, image, memory, memoryOffset);
2260         if (result != VK_SUCCESS) {
2261             return result;
2262         }
2263 
2264         std::lock_guard<std::recursive_mutex> lock(mLock);
2265 
2266         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2267         if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
2268 
2269         auto* memoryInfo = android::base::find(mMemoryInfo, memory);
2270         if (!memoryInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
2271 
2272 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
2273         if (memoryInfo->mtlTexture) {
2274             result = m_vk->vkSetMTLTextureMVK(image, memoryInfo->mtlTexture);
2275             if (result != VK_SUCCESS) {
2276                 fprintf(stderr, "vkSetMTLTextureMVK failed\n");
2277                 return VK_ERROR_OUT_OF_HOST_MEMORY;
2278             }
2279         }
2280 #endif
2281 
2282         auto* imageInfo = android::base::find(mImageInfo, image);
2283         if (!imageInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
2284         imageInfo->boundColorBuffer = memoryInfo->boundColorBuffer;
2285         if (imageInfo->boundColorBuffer) {
2286             deviceInfo->debugUtilsHelper.addDebugLabel(image, "ColorBuffer:%d",
2287                                                        *imageInfo->boundColorBuffer);
2288         }
2289         imageInfo->memory = memory;
2290 
2291         if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
2292             return VK_SUCCESS;
2293         }
2294 
2295         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
2296         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
2297             return VK_SUCCESS;
2298         }
2299         return cmpInfo.bindCompressedMipmapsMemory(vk, memory, memoryOffset);
2300     }
2301 
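    // vkBindImageMemory is funneled through the same path as vkBindImageMemory2 by wrapping
    // the arguments in a VkBindImageMemoryInfo.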
2302     VkResult on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice boxed_device,
2303                                   VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
2304         const VkBindImageMemoryInfo bimi = {
2305             .sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
2306             .pNext = nullptr,
2307             .image = image,
2308             .memory = memory,
2309             .memoryOffset = memoryOffset,
2310         };
2311         return performBindImageMemory(pool, boxed_device, &bimi);
2312     }
2313 
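    // If any bind info needs host-side handling (a chained VkNativeBufferANDROID or an image
    // using emulated decompression), each bind is performed individually through
    // performBindImageMemory(); otherwise the whole batch is forwarded to the driver.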
2314     VkResult on_vkBindImageMemory2(android::base::BumpPool* pool, VkDevice boxed_device,
2315                                    uint32_t bindInfoCount,
2316                                    const VkBindImageMemoryInfo* pBindInfos) {
2317         auto device = unbox_VkDevice(boxed_device);
2318         auto vk = dispatch_VkDevice(boxed_device);
2319         bool needEmulation = false;
2320 
2321         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2322         if (!deviceInfo) return VK_ERROR_UNKNOWN;
2323 
2324         for (uint32_t i = 0; i < bindInfoCount; i++) {
2325             auto* imageInfo = android::base::find(mImageInfo, pBindInfos[i].image);
2326             if (!imageInfo) return VK_ERROR_UNKNOWN;
2327 
2328             const auto* anb = vk_find_struct<VkNativeBufferANDROID>(&pBindInfos[i]);
2329             if (anb != nullptr) {
2330                 needEmulation = true;
2331                 break;
2332             }
2333 
2334             if (deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
2335                 needEmulation = true;
2336                 break;
2337             }
2338         }
2339 
2340         if (needEmulation) {
2341             VkResult result;
2342             for (uint32_t i = 0; i < bindInfoCount; i++) {
2343                 result = performBindImageMemory(pool, boxed_device, &pBindInfos[i]);
2344                 if (result != VK_SUCCESS) return result;
2345             }
2346 
2347             return VK_SUCCESS;
2348         }
2349 
2350         VkResult result = vk->vkBindImageMemory2(device, bindInfoCount, pBindInfos);
2351         if (result != VK_SUCCESS) {
2352             return result;
2353         }
2354 
2355         if (deviceInfo->debugUtilsHelper.isEnabled()) {
2356             std::lock_guard<std::recursive_mutex> lock(mLock);
2357             for (uint32_t i = 0; i < bindInfoCount; i++) {
2358                 auto* memoryInfo = android::base::find(mMemoryInfo, pBindInfos[i].memory);
2359                 if (!memoryInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
2360 
2361                 if (memoryInfo->boundColorBuffer) {
2362                     deviceInfo->debugUtilsHelper.addDebugLabel(
2363                         pBindInfos[i].image, "ColorBuffer:%d", *memoryInfo->boundColorBuffer);
2364                 }
2365             }
2366         }
2367 
2368         return result;
2369     }
2370 
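    // For images using emulated decompression, the view is redirected: a view requested in the
    // emulated compressed format targets the decompressed output image, while other views on
    // such images target the per-level compressed mipmap image with baseMipLevel rebased to 0.
    // needEmulatedAlpha is recorded so transparent-border samplers can be patched at descriptor
    // update time.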
2371     VkResult on_vkCreateImageView(android::base::BumpPool* pool, VkDevice boxed_device,
2372                                   const VkImageViewCreateInfo* pCreateInfo,
2373                                   const VkAllocationCallbacks* pAllocator, VkImageView* pView) {
2374         auto device = unbox_VkDevice(boxed_device);
2375         auto vk = dispatch_VkDevice(boxed_device);
2376         if (!pCreateInfo) {
2377             return VK_ERROR_OUT_OF_HOST_MEMORY;
2378         }
2379 
2380         std::lock_guard<std::recursive_mutex> lock(mLock);
2381         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2382         auto* imageInfo = android::base::find(mImageInfo, pCreateInfo->image);
2383         if (!deviceInfo || !imageInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
2384         VkImageViewCreateInfo createInfo;
2385         bool needEmulatedAlpha = false;
2386         if (deviceInfo->needEmulatedDecompression(pCreateInfo->format)) {
2387             if (imageInfo->cmpInfo.outputImage()) {
2388                 createInfo = *pCreateInfo;
2389                 createInfo.format = CompressedImageInfo::getOutputFormat(pCreateInfo->format);
2390                 needEmulatedAlpha = CompressedImageInfo::needEmulatedAlpha(pCreateInfo->format);
2391                 createInfo.image = imageInfo->cmpInfo.outputImage();
2392                 pCreateInfo = &createInfo;
2393             }
2394         } else if (deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
2395             // Image view on the compressed mipmaps
2396             createInfo = *pCreateInfo;
2397             createInfo.format =
2398                 CompressedImageInfo::getCompressedMipmapsFormat(pCreateInfo->format);
2399             needEmulatedAlpha = false;
2400             createInfo.image =
2401                 imageInfo->cmpInfo.compressedMipmap(pCreateInfo->subresourceRange.baseMipLevel);
2402             createInfo.subresourceRange.baseMipLevel = 0;
2403             pCreateInfo = &createInfo;
2404         }
2405         if (imageInfo->anbInfo && imageInfo->anbInfo->externallyBacked) {
2406             createInfo = *pCreateInfo;
2407             pCreateInfo = &createInfo;
2408         }
2409 
2410         VkResult result = vk->vkCreateImageView(device, pCreateInfo, pAllocator, pView);
2411         if (result != VK_SUCCESS) {
2412             return result;
2413         }
2414 
2415         auto& imageViewInfo = mImageViewInfo[*pView];
2416         imageViewInfo.device = device;
2417         imageViewInfo.needEmulatedAlpha = needEmulatedAlpha;
2418         imageViewInfo.boundColorBuffer = imageInfo->boundColorBuffer;
2419         if (imageViewInfo.boundColorBuffer) {
2420             deviceInfo->debugUtilsHelper.addDebugLabel(*pView, "ColorBuffer:%d",
2421                                                        *imageViewInfo.boundColorBuffer);
2422         }
2423 
2424         *pView = new_boxed_non_dispatchable_VkImageView(*pView);
2425         return result;
2426     }
2427 
2428     void on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice boxed_device,
2429                                VkImageView imageView, const VkAllocationCallbacks* pAllocator) {
2430         auto device = unbox_VkDevice(boxed_device);
2431         auto vk = dispatch_VkDevice(boxed_device);
2432 
2433         vk->vkDestroyImageView(device, imageView, pAllocator);
2434         std::lock_guard<std::recursive_mutex> lock(mLock);
2435         mImageViewInfo.erase(imageView);
2436     }
2437 
2438     VkResult on_vkCreateSampler(android::base::BumpPool* pool, VkDevice boxed_device,
2439                                 const VkSamplerCreateInfo* pCreateInfo,
2440                                 const VkAllocationCallbacks* pAllocator, VkSampler* pSampler) {
2441         auto device = unbox_VkDevice(boxed_device);
2442         auto vk = dispatch_VkDevice(boxed_device);
2443         VkResult result = vk->vkCreateSampler(device, pCreateInfo, pAllocator, pSampler);
2444         if (result != VK_SUCCESS) {
2445             return result;
2446         }
2447         std::lock_guard<std::recursive_mutex> lock(mLock);
2448         auto& samplerInfo = mSamplerInfo[*pSampler];
2449         samplerInfo.device = device;
2450         deepcopy_VkSamplerCreateInfo(pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
2451                                      pCreateInfo, &samplerInfo.createInfo);
2452         // We emulate RGB with RGBA for some compressed textures, which does not
2453         // handle transparent border colors correctly.
2454         samplerInfo.needEmulatedAlpha =
2455             (pCreateInfo->addressModeU == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
2456              pCreateInfo->addressModeV == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER ||
2457              pCreateInfo->addressModeW == VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER) &&
2458             (pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK ||
2459              pCreateInfo->borderColor == VK_BORDER_COLOR_INT_TRANSPARENT_BLACK ||
2460              pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_CUSTOM_EXT ||
2461              pCreateInfo->borderColor == VK_BORDER_COLOR_INT_CUSTOM_EXT);
2462 
2463         *pSampler = new_boxed_non_dispatchable_VkSampler(*pSampler);
2464 
2465         return result;
2466     }
2467 
2468     void destroySamplerLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkSampler sampler,
2469                               const VkAllocationCallbacks* pAllocator) {
2470         deviceDispatch->vkDestroySampler(device, sampler, pAllocator);
2471 
2472         auto* samplerInfo = android::base::find(mSamplerInfo, sampler);
2473         if (!samplerInfo) return;
2474 
2475         if (samplerInfo->emulatedborderSampler != VK_NULL_HANDLE) {
2476             deviceDispatch->vkDestroySampler(device, samplerInfo->emulatedborderSampler, nullptr);
2477         }
2478         mSamplerInfo.erase(sampler);
2479     }
2480 
2481     void on_vkDestroySampler(android::base::BumpPool* pool, VkDevice boxed_device,
2482                              VkSampler sampler, const VkAllocationCallbacks* pAllocator) {
2483         auto device = unbox_VkDevice(boxed_device);
2484         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2485 
2486         std::lock_guard<std::recursive_mutex> lock(mLock);
2487         destroySamplerLocked(device, deviceDispatch, sampler, pAllocator);
2488     }
2489 
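    // The create info and any chained semaphore-type / export structs are copied out of the
    // guest stream so they can be adjusted host-side; on Windows, requested export handle
    // types are rewritten to OPAQUE_WIN32.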
2490     VkResult on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
2491                                   const VkSemaphoreCreateInfo* pCreateInfo,
2492                                   const VkAllocationCallbacks* pAllocator,
2493                                   VkSemaphore* pSemaphore) {
2494         auto device = unbox_VkDevice(boxed_device);
2495         auto vk = dispatch_VkDevice(boxed_device);
2496 
2497         VkSemaphoreCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
2498         vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localCreateInfo);
2499 
2500         VkSemaphoreTypeCreateInfoKHR localSemaphoreTypeCreateInfo;
2501         if (const VkSemaphoreTypeCreateInfoKHR* semaphoreTypeCiPtr =
2502                 vk_find_struct<VkSemaphoreTypeCreateInfoKHR>(pCreateInfo);
2503             semaphoreTypeCiPtr) {
2504             localSemaphoreTypeCreateInfo = vk_make_orphan_copy(*semaphoreTypeCiPtr);
2505             vk_append_struct(&structChainIter, &localSemaphoreTypeCreateInfo);
2506         }
2507 
2508         const VkExportSemaphoreCreateInfoKHR* exportCiPtr =
2509             vk_find_struct<VkExportSemaphoreCreateInfoKHR>(pCreateInfo);
2510         VkExportSemaphoreCreateInfoKHR localSemaphoreCreateInfo;
2511 
2512         if (exportCiPtr) {
2513             localSemaphoreCreateInfo = vk_make_orphan_copy(*exportCiPtr);
2514 
2515 #ifdef _WIN32
2516             if (localSemaphoreCreateInfo.handleTypes) {
2517                 localSemaphoreCreateInfo.handleTypes =
2518                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR;
2519             }
2520 #endif
2521 
2522             vk_append_struct(&structChainIter, &localSemaphoreCreateInfo);
2523         }
2524 
2525         VkResult res = vk->vkCreateSemaphore(device, &localCreateInfo, pAllocator, pSemaphore);
2526 
2527         if (res != VK_SUCCESS) return res;
2528 
2529         std::lock_guard<std::recursive_mutex> lock(mLock);
2530 
2531         auto& semaphoreInfo = mSemaphoreInfo[*pSemaphore];
2532         semaphoreInfo.device = device;
2533 
2534         *pSemaphore = new_boxed_non_dispatchable_VkSemaphore(*pSemaphore);
2535 
2536         return res;
2537     }
2538 
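    // When the guest asks for a SYNC_FD-exportable fence, the export info is stripped (SYNC_FD
    // handling happens guest-side) and, when available, a recycled fence from the device's
    // ExternalFencePool is reused instead of creating a new one.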
2539     VkResult on_vkCreateFence(android::base::BumpPool* pool, VkDevice boxed_device,
2540                               const VkFenceCreateInfo* pCreateInfo,
2541                               const VkAllocationCallbacks* pAllocator, VkFence* pFence) {
2542         auto device = unbox_VkDevice(boxed_device);
2543         auto vk = dispatch_VkDevice(boxed_device);
2544 
2545         VkFenceCreateInfo& createInfo = const_cast<VkFenceCreateInfo&>(*pCreateInfo);
2546 
2547         const VkExportFenceCreateInfo* exportFenceInfoPtr =
2548             vk_find_struct<VkExportFenceCreateInfo>(pCreateInfo);
2549         bool exportSyncFd = exportFenceInfoPtr && (exportFenceInfoPtr->handleTypes &
2550                                                    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT);
2551         bool fenceReused = false;
2552 
2553         *pFence = VK_NULL_HANDLE;
2554 
2555         if (exportSyncFd) {
2556             // Remove VkExportFenceCreateInfo, since the host doesn't need to create
2557             // an exportable fence in this case.
2558             ExternalFencePool<VulkanDispatch>* externalFencePool = nullptr;
2559             vk_struct_chain_remove(exportFenceInfoPtr, &createInfo);
2560             {
2561                 std::lock_guard<std::recursive_mutex> lock(mLock);
2562                 auto* deviceInfo = android::base::find(mDeviceInfo, device);
2563                 if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
2564                 externalFencePool = deviceInfo->externalFencePool.get();
2565             }
2566             *pFence = externalFencePool->pop(pCreateInfo);
2567             if (*pFence != VK_NULL_HANDLE) {
2568                 fenceReused = true;
2569             }
2570         }
2571 
2572         if (*pFence == VK_NULL_HANDLE) {
2573             VkResult res = vk->vkCreateFence(device, &createInfo, pAllocator, pFence);
2574             if (res != VK_SUCCESS) {
2575                 return res;
2576             }
2577         }
2578 
2579         {
2580             std::lock_guard<std::recursive_mutex> lock(mLock);
2581 
2582             DCHECK(fenceReused || mFenceInfo.find(*pFence) == mFenceInfo.end());
2583             // Create FenceInfo for *pFence.
2584             auto& fenceInfo = mFenceInfo[*pFence];
2585             fenceInfo.device = device;
2586             fenceInfo.vk = vk;
2587 
2588             *pFence = new_boxed_non_dispatchable_VkFence(*pFence);
2589             fenceInfo.boxed = *pFence;
2590             fenceInfo.external = exportSyncFd;
2591             fenceInfo.state = FenceInfo::State::kNotWaitable;
2592         }
2593 
2594         return VK_SUCCESS;
2595     }
2596 
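    // Non-external fences are reset directly. External (SYNC_FD) fences are swapped instead:
    // a replacement is taken from (or created for) the external fence pool, the old fence is
    // handed back to the pool for recycling, and the guest's boxed handle is re-pointed at the
    // replacement.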
2597     VkResult on_vkResetFences(android::base::BumpPool* pool, VkDevice boxed_device,
2598                               uint32_t fenceCount, const VkFence* pFences) {
2599         auto device = unbox_VkDevice(boxed_device);
2600         auto vk = dispatch_VkDevice(boxed_device);
2601 
2602         std::vector<VkFence> cleanedFences;
2603         std::vector<VkFence> externalFences;
2604 
2605         {
2606             std::lock_guard<std::recursive_mutex> lock(mLock);
2607             for (uint32_t i = 0; i < fenceCount; i++) {
2608                 if (pFences[i] == VK_NULL_HANDLE) continue;
2609 
2610                 DCHECK(mFenceInfo.find(pFences[i]) != mFenceInfo.end());
2611                 if (mFenceInfo[pFences[i]].external) {
2612                     externalFences.push_back(pFences[i]);
2613                 } else {
2614                     // Reset all fences' states to kNotWaitable.
2615                     cleanedFences.push_back(pFences[i]);
2616                     mFenceInfo[pFences[i]].state = FenceInfo::State::kNotWaitable;
2617                 }
2618             }
2619         }
2620 
2621         if (!cleanedFences.empty()) {
2622             VK_CHECK(vk->vkResetFences(device, (uint32_t)cleanedFences.size(),
2623                                        cleanedFences.data()));
2624         }
2625 
2626         // For external fences, we unilaterally put them in the pool to ensure they finish
2627         // TODO: should store creation info / pNext chain per fence and re-apply?
2628         VkFenceCreateInfo createInfo{
2629             .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO, .pNext = 0, .flags = 0};
2630         auto* deviceInfo = android::base::find(mDeviceInfo, device);
2631         if (!deviceInfo) return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2632         for (auto fence : externalFences) {
2633             VkFence replacement = deviceInfo->externalFencePool->pop(&createInfo);
2634             if (replacement == VK_NULL_HANDLE) {
2635                 VK_CHECK(vk->vkCreateFence(device, &createInfo, 0, &replacement));
2636             }
2637             deviceInfo->externalFencePool->add(fence);
2638 
2639             {
2640                 std::lock_guard<std::recursive_mutex> lock(mLock);
2641                 auto boxed_fence = unboxed_to_boxed_non_dispatchable_VkFence(fence);
2642                 set_boxed_non_dispatchable_VkFence(boxed_fence, replacement);
2643 
2644                 auto& fenceInfo = mFenceInfo[replacement];
2645                 fenceInfo.device = device;
2646                 fenceInfo.vk = vk;
2647                 fenceInfo.boxed = boxed_fence;
2648                 fenceInfo.external = true;
2649                 fenceInfo.state = FenceInfo::State::kNotWaitable;
2650 
2651                 mFenceInfo[fence].boxed = VK_NULL_HANDLE;
2652             }
2653         }
2654 
2655         return VK_SUCCESS;
2656     }
2657 
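    // On Windows the incoming "fd" is really an id minted by on_vkGetSemaphoreFdKHR; it is
    // looked up in mExternalSemaphoresById and the duplicated Win32 handle is imported instead.
    // On other platforms the fd is duplicated and imported directly.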
2658     VkResult on_vkImportSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
2659                                        const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
2660         auto device = unbox_VkDevice(boxed_device);
2661         auto vk = dispatch_VkDevice(boxed_device);
2662 
2663 #ifdef _WIN32
2664         std::lock_guard<std::recursive_mutex> lock(mLock);
2665 
2666         auto* infoPtr = android::base::find(mSemaphoreInfo,
2667                                             mExternalSemaphoresById[pImportSemaphoreFdInfo->fd]);
2668 
2669         if (!infoPtr) {
2670             return VK_ERROR_INVALID_EXTERNAL_HANDLE;
2671         }
2672 
2673         VK_EXT_SYNC_HANDLE handle = dupExternalSync(infoPtr->externalHandle);
2674 
2675         VkImportSemaphoreWin32HandleInfoKHR win32ImportInfo = {
2676             VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
2677             0,
2678             pImportSemaphoreFdInfo->semaphore,
2679             pImportSemaphoreFdInfo->flags,
2680             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR,
2681             handle,
2682             L"",
2683         };
2684 
2685         return vk->vkImportSemaphoreWin32HandleKHR(device, &win32ImportInfo);
2686 #else
2687         if (!hasDeviceExtension(device, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
2688             // Note: VK_KHR_external_semaphore_fd might be advertised in the guest,
2689             // because SYNC_FD handling is performed guest-side only. But we still need
2690             // to error out here when handling a non-sync, opaque FD.
2691             return VK_ERROR_OUT_OF_HOST_MEMORY;
2692         }
2693 
2694         VkImportSemaphoreFdInfoKHR importInfo = *pImportSemaphoreFdInfo;
2695         importInfo.fd = dup(pImportSemaphoreFdInfo->fd);
2696         return vk->vkImportSemaphoreFdKHR(device, &importInfo);
2697 #endif
2698     }
2699 
2700     VkResult on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool, VkDevice boxed_device,
2701                                     const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd) {
2702         auto device = unbox_VkDevice(boxed_device);
2703         auto vk = dispatch_VkDevice(boxed_device);
2704 #ifdef _WIN32
2705         VkSemaphoreGetWin32HandleInfoKHR getWin32 = {
2706             VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
2707             0,
2708             pGetFdInfo->semaphore,
2709             VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
2710         };
2711         VK_EXT_SYNC_HANDLE handle;
2712         VkResult result = vk->vkGetSemaphoreWin32HandleKHR(device, &getWin32, &handle);
2713         if (result != VK_SUCCESS) {
2714             return result;
2715         }
2716         std::lock_guard<std::recursive_mutex> lock(mLock);
2717         mSemaphoreInfo[pGetFdInfo->semaphore].externalHandle = handle;
2718         int nextId = genSemaphoreId();
2719         mExternalSemaphoresById[nextId] = pGetFdInfo->semaphore;
2720         *pFd = nextId;
2721 #else
2722         if (!hasDeviceExtension(device, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
2723             // Note: VK_KHR_external_semaphore_fd might be advertised in the guest,
2724             // because SYNC_FD handling is performed guest-side only. But we still need
2725             // to error out here when handling a non-sync, opaque FD.
2726             return VK_ERROR_OUT_OF_HOST_MEMORY;
2727         }
2728 
2729         VkResult result = vk->vkGetSemaphoreFdKHR(device, pGetFdInfo, pFd);
2730         if (result != VK_SUCCESS) {
2731             return result;
2732         }
2733 
2734         std::lock_guard<std::recursive_mutex> lock(mLock);
2735 
2736         mSemaphoreInfo[pGetFdInfo->semaphore].externalHandle = *pFd;
2737         // No next id; it's already an fd.
2738 #endif
2739         return result;
2740     }
2741 
2742     void destroySemaphoreLocked(VkDevice device, VulkanDispatch* deviceDispatch,
2743                                 VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {
2744         auto semaphoreInfoIt = mSemaphoreInfo.find(semaphore);
2745         if (semaphoreInfoIt == mSemaphoreInfo.end()) return;
2746         auto& semaphoreInfo = semaphoreInfoIt->second;
2747 
2748 #ifndef _WIN32
2749         if (semaphoreInfo.externalHandle != VK_EXT_SYNC_HANDLE_INVALID) {
2750             close(semaphoreInfo.externalHandle);
2751         }
2752 #endif
2753 
2754         if (semaphoreInfo.latestUse && !IsDone(*semaphoreInfo.latestUse)) {
2755             auto deviceInfoIt = mDeviceInfo.find(device);
2756             if (deviceInfoIt != mDeviceInfo.end()) {
2757                 auto& deviceInfo = deviceInfoIt->second;
2758                 deviceInfo.deviceOpTracker->AddPendingGarbage(*semaphoreInfo.latestUse, semaphore);
2759                 deviceInfo.deviceOpTracker->PollAndProcessGarbage();
2760             }
2761         } else {
2762             deviceDispatch->vkDestroySemaphore(device, semaphore, pAllocator);
2763         }
2764 
2765         mSemaphoreInfo.erase(semaphoreInfoIt);
2766     }
2767 
2768     void on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice boxed_device,
2769                                VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator) {
2770         auto device = unbox_VkDevice(boxed_device);
2771         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2772 
2773         std::lock_guard<std::recursive_mutex> lock(mLock);
2774         destroySemaphoreLocked(device, deviceDispatch, semaphore, pAllocator);
2775     }
2776 
2777     void on_vkDestroyFence(android::base::BumpPool* pool, VkDevice boxed_device, VkFence fence,
2778                            const VkAllocationCallbacks* pAllocator) {
2779         auto device = unbox_VkDevice(boxed_device);
2780         auto deviceDispatch = dispatch_VkDevice(boxed_device);
2781 
2782         bool destructionDeferred = false;
2783         {
2784             std::lock_guard<std::recursive_mutex> lock(mLock);
2785 
2786             auto fenceInfoIt = mFenceInfo.find(fence);
2787             if (fenceInfoIt == mFenceInfo.end()) {
2788                 ERR("Failed to find fence info for VkFence:%p. Leaking fence!", fence);
2789                 return;
2790             }
2791             auto& fenceInfo = fenceInfoIt->second;
2792 
2793             auto deviceInfoIt = mDeviceInfo.find(device);
2794             if (deviceInfoIt == mDeviceInfo.end()) {
2795                 ERR("Failed to find device info for VkDevice:%p for VkFence:%p. Leaking fence!",
2796                     device, fence);
2797                 return;
2798             }
2799             auto& deviceInfo = deviceInfoIt->second;
2800 
2801             fenceInfo.boxed = VK_NULL_HANDLE;
2802 
2803             // External fences are just slated for recycling. This addresses known
2804             // behavior where the guest might destroy the fence prematurely. b/228221208
2805             if (fenceInfo.external) {
2806                 deviceInfo.externalFencePool->add(fence);
2807                 return;
2808             }
2809 
2810             // Fences used for swapchains have their destruction deferred.
2811             if (fenceInfo.latestUse && !IsDone(*fenceInfo.latestUse)) {
2812                 deviceInfo.deviceOpTracker->AddPendingGarbage(*fenceInfo.latestUse, fence);
2813                 deviceInfo.deviceOpTracker->PollAndProcessGarbage();
2814                 destructionDeferred = true;
2815             }
2816             mFenceInfo.erase(fence);
2817         }
2818 
2819         if (!destructionDeferred) {
2820             deviceDispatch->vkDestroyFence(device, fence, pAllocator);
2821         }
2822     }
2823 
2824     VkResult on_vkCreateDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
2825                                             const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
2826                                             const VkAllocationCallbacks* pAllocator,
2827                                             VkDescriptorSetLayout* pSetLayout) {
2828         auto device = unbox_VkDevice(boxed_device);
2829         auto vk = dispatch_VkDevice(boxed_device);
2830 
2831         auto res = vk->vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
2832 
2833         if (res == VK_SUCCESS) {
2834             std::lock_guard<std::recursive_mutex> lock(mLock);
2835             auto& info = mDescriptorSetLayoutInfo[*pSetLayout];
2836             info.device = device;
2837             *pSetLayout = new_boxed_non_dispatchable_VkDescriptorSetLayout(*pSetLayout);
2838             info.boxed = *pSetLayout;
2839 
2840             info.createInfo = *pCreateInfo;
2841             for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
2842                 info.bindings.push_back(pCreateInfo->pBindings[i]);
2843             }
2844         }
2845 
2846         return res;
2847     }
2848 
2849     void on_vkDestroyDescriptorSetLayout(android::base::BumpPool* pool, VkDevice boxed_device,
2850                                          VkDescriptorSetLayout descriptorSetLayout,
2851                                          const VkAllocationCallbacks* pAllocator) {
2852         auto device = unbox_VkDevice(boxed_device);
2853         auto vk = dispatch_VkDevice(boxed_device);
2854 
2855         vk->vkDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
2856 
2857         std::lock_guard<std::recursive_mutex> lock(mLock);
2858         mDescriptorSetLayoutInfo.erase(descriptorSetLayout);
2859     }
2860 
2861     VkResult on_vkCreateDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
2862                                        const VkDescriptorPoolCreateInfo* pCreateInfo,
2863                                        const VkAllocationCallbacks* pAllocator,
2864                                        VkDescriptorPool* pDescriptorPool) {
2865         auto device = unbox_VkDevice(boxed_device);
2866         auto vk = dispatch_VkDevice(boxed_device);
2867 
2868         auto res = vk->vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
2869 
2870         if (res == VK_SUCCESS) {
2871             std::lock_guard<std::recursive_mutex> lock(mLock);
2872             auto& info = mDescriptorPoolInfo[*pDescriptorPool];
2873             info.device = device;
2874             *pDescriptorPool = new_boxed_non_dispatchable_VkDescriptorPool(*pDescriptorPool);
2875             info.boxed = *pDescriptorPool;
2876             info.createInfo = *pCreateInfo;
2877             info.maxSets = pCreateInfo->maxSets;
2878             info.usedSets = 0;
2879 
2880             for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; ++i) {
2881                 DescriptorPoolInfo::PoolState state;
2882                 state.type = pCreateInfo->pPoolSizes[i].type;
2883                 state.descriptorCount = pCreateInfo->pPoolSizes[i].descriptorCount;
2884                 state.used = 0;
2885                 info.pools.push_back(state);
2886             }
2887 
2888             if (m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled) {
2889                 for (uint32_t i = 0; i < pCreateInfo->maxSets; ++i) {
2890                     info.poolIds.push_back(
2891                         (uint64_t)new_boxed_non_dispatchable_VkDescriptorSet(VK_NULL_HANDLE));
2892                 }
2893                 if (snapshotsEnabled()) {
2894                     snapshot()->createExtraHandlesForNextApi(info.poolIds.data(),
2895                                                              info.poolIds.size());
2896                 }
2897             }
2898         }
2899 
2900         return res;
2901     }
2902 
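    // Releases every descriptor set tracked for the given pool. With
    // VulkanBatchedDescriptorSetUpdate, the pre-created boxed pool ids survive a reset (their
    // underlying handles are just cleared) and are only deleted when the pool is destroyed.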
2903     void cleanupDescriptorPoolAllocedSetsLocked(VkDescriptorPool descriptorPool,
2904                                                 bool isDestroy = false) {
2905         auto* info = android::base::find(mDescriptorPoolInfo, descriptorPool);
2906         if (!info) return;
2907 
2908         for (auto it : info->allocedSetsToBoxed) {
2909             auto unboxedSet = it.first;
2910             auto boxedSet = it.second;
2911             mDescriptorSetInfo.erase(unboxedSet);
2912             if (!m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled) {
2913                 delete_VkDescriptorSet(boxedSet);
2914             }
2915         }
2916 
2917         if (m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled) {
2918             if (isDestroy) {
2919                 for (auto poolId : info->poolIds) {
2920                     delete_VkDescriptorSet((VkDescriptorSet)poolId);
2921                 }
2922             } else {
2923                 for (auto poolId : info->poolIds) {
2924                     auto handleInfo = sBoxedHandleManager.get(poolId);
2925                     if (handleInfo)
2926                         handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
2927                 }
2928             }
2929         }
2930 
2931         info->usedSets = 0;
2932         info->allocedSetsToBoxed.clear();
2933 
2934         for (auto& pool : info->pools) {
2935             pool.used = 0;
2936         }
2937     }
2938 
2939     void on_vkDestroyDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
2940                                     VkDescriptorPool descriptorPool,
2941                                     const VkAllocationCallbacks* pAllocator) {
2942         auto device = unbox_VkDevice(boxed_device);
2943         auto vk = dispatch_VkDevice(boxed_device);
2944 
2945         vk->vkDestroyDescriptorPool(device, descriptorPool, pAllocator);
2946 
2947         std::lock_guard<std::recursive_mutex> lock(mLock);
2948         cleanupDescriptorPoolAllocedSetsLocked(descriptorPool, true /* destroy */);
2949         mDescriptorPoolInfo.erase(descriptorPool);
2950     }
2951 
2952     VkResult on_vkResetDescriptorPool(android::base::BumpPool* pool, VkDevice boxed_device,
2953                                       VkDescriptorPool descriptorPool,
2954                                       VkDescriptorPoolResetFlags flags) {
2955         auto device = unbox_VkDevice(boxed_device);
2956         auto vk = dispatch_VkDevice(boxed_device);
2957 
2958         auto res = vk->vkResetDescriptorPool(device, descriptorPool, flags);
2959 
2960         if (res == VK_SUCCESS) {
2961             std::lock_guard<std::recursive_mutex> lock(mLock);
2962             cleanupDescriptorPoolAllocedSetsLocked(descriptorPool);
2963         }
2964 
2965         return res;
2966     }
2967 
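    // Records the layout bindings for a newly allocated set and sizes its per-binding shadow
    // write table (allWrites), which the snapshot path keeps up to date on each descriptor
    // update.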
2968     void initDescriptorSetInfoLocked(VkDescriptorPool pool, VkDescriptorSetLayout setLayout,
2969                                      uint64_t boxedDescriptorSet, VkDescriptorSet descriptorSet) {
2970         auto* poolInfo = android::base::find(mDescriptorPoolInfo, pool);
2971         if (!poolInfo) {
2972             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find poolInfo";
2973         }
2974 
2975         auto* setLayoutInfo = android::base::find(mDescriptorSetLayoutInfo, setLayout);
2976         if (!setLayoutInfo) {
2977             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Cannot find setLayout";
2978         }
2979 
2980         auto& setInfo = mDescriptorSetInfo[descriptorSet];
2981 
2982         setInfo.pool = pool;
2983         setInfo.unboxedLayout = setLayout;
2984         setInfo.bindings = setLayoutInfo->bindings;
2985         for (size_t i = 0; i < setInfo.bindings.size(); i++) {
2986             VkDescriptorSetLayoutBinding dslBinding = setInfo.bindings[i];
2987             int bindingIdx = dslBinding.binding;
2988             if (setInfo.allWrites.size() <= bindingIdx) {
2989                 setInfo.allWrites.resize(bindingIdx + 1);
2990             }
2991             setInfo.allWrites[bindingIdx].resize(dslBinding.descriptorCount);
2992             for (auto& write : setInfo.allWrites[bindingIdx]) {
2993                 write.descriptorType = dslBinding.descriptorType;
2994                 write.dstArrayElement = 0;
2995             }
2996         }
2997 
2998         poolInfo->allocedSetsToBoxed[descriptorSet] = (VkDescriptorSet)boxedDescriptorSet;
2999         applyDescriptorSetAllocationLocked(*poolInfo, setInfo.bindings);
3000     }
3001 
3002     VkResult on_vkAllocateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
3003                                          const VkDescriptorSetAllocateInfo* pAllocateInfo,
3004                                          VkDescriptorSet* pDescriptorSets) {
3005         auto device = unbox_VkDevice(boxed_device);
3006         auto vk = dispatch_VkDevice(boxed_device);
3007 
3008         std::lock_guard<std::recursive_mutex> lock(mLock);
3009 
3010         auto allocValidationRes = validateDescriptorSetAllocLocked(pAllocateInfo);
3011         if (allocValidationRes != VK_SUCCESS) return allocValidationRes;
3012 
3013         auto res = vk->vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3014 
3015         if (res == VK_SUCCESS) {
3016             auto* poolInfo =
3017                 android::base::find(mDescriptorPoolInfo, pAllocateInfo->descriptorPool);
3018             if (!poolInfo) return res;
3019 
3020             for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
3021                 auto unboxed = pDescriptorSets[i];
3022                 pDescriptorSets[i] = new_boxed_non_dispatchable_VkDescriptorSet(pDescriptorSets[i]);
3023                 initDescriptorSetInfoLocked(pAllocateInfo->descriptorPool,
3024                                             pAllocateInfo->pSetLayouts[i],
3025                                             (uint64_t)(pDescriptorSets[i]), unboxed);
3026             }
3027         }
3028 
3029         return res;
3030     }
3031 
3032     VkResult on_vkFreeDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
3033                                      VkDescriptorPool descriptorPool, uint32_t descriptorSetCount,
3034                                      const VkDescriptorSet* pDescriptorSets) {
3035         auto device = unbox_VkDevice(boxed_device);
3036         auto vk = dispatch_VkDevice(boxed_device);
3037 
3038         auto res =
3039             vk->vkFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
3040 
3041         if (res == VK_SUCCESS) {
3042             std::lock_guard<std::recursive_mutex> lock(mLock);
3043 
3044             for (uint32_t i = 0; i < descriptorSetCount; ++i) {
3045                 auto* setInfo = android::base::find(mDescriptorSetInfo, pDescriptorSets[i]);
3046                 if (!setInfo) continue;
3047                 auto* poolInfo = android::base::find(mDescriptorPoolInfo, setInfo->pool);
3048                 if (!poolInfo) continue;
3049 
3050                 removeDescriptorSetAllocationLocked(*poolInfo, setInfo->bindings);
3051 
3052                 auto descSetAllocedEntry =
3053                     android::base::find(poolInfo->allocedSetsToBoxed, pDescriptorSets[i]);
3054                 if (!descSetAllocedEntry) continue;
3055 
3056                 auto handleInfo = sBoxedHandleManager.get((uint64_t)*descSetAllocedEntry);
3057                 if (handleInfo) {
3058                     if (m_emu->features.VulkanBatchedDescriptorSetUpdate.enabled) {
3059                         handleInfo->underlying = reinterpret_cast<uint64_t>(VK_NULL_HANDLE);
3060                     } else {
3061                         delete_VkDescriptorSet(*descSetAllocedEntry);
3062                     }
3063                 }
3064 
3065                 poolInfo->allocedSetsToBoxed.erase(pDescriptorSets[i]);
3066 
3067                 mDescriptorSetInfo.erase(pDescriptorSets[i]);
3068             }
3069         }
3070 
3071         return res;
3072     }
3073 
3074     void on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice boxed_device,
3075                                    uint32_t descriptorWriteCount,
3076                                    const VkWriteDescriptorSet* pDescriptorWrites,
3077                                    uint32_t descriptorCopyCount,
3078                                    const VkCopyDescriptorSet* pDescriptorCopies) {
3079         auto device = unbox_VkDevice(boxed_device);
3080         auto vk = dispatch_VkDevice(boxed_device);
3081 
3082         std::lock_guard<std::recursive_mutex> lock(mLock);
3083         on_vkUpdateDescriptorSetsImpl(pool, vk, device, descriptorWriteCount, pDescriptorWrites,
3084                                       descriptorCopyCount, pDescriptorCopies);
3085     }
3086 
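    // When snapshots are enabled, descriptor writes are first mirrored into each set's shadow
    // write table. Writes that pair a needEmulatedAlpha image view with a needEmulatedAlpha
    // sampler are then deep-copied so the sampler can be replaced with one using an opaque
    // border color before the update is forwarded to the driver.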
3087     void on_vkUpdateDescriptorSetsImpl(android::base::BumpPool* pool, VulkanDispatch* vk,
3088                                        VkDevice device, uint32_t descriptorWriteCount,
3089                                        const VkWriteDescriptorSet* pDescriptorWrites,
3090                                        uint32_t descriptorCopyCount,
3091                                        const VkCopyDescriptorSet* pDescriptorCopies) {
3092         if (snapshotsEnabled()) {
3093             for (uint32_t writeIdx = 0; writeIdx < descriptorWriteCount; writeIdx++) {
3094                 const VkWriteDescriptorSet& descriptorWrite = pDescriptorWrites[writeIdx];
3095                 auto ite = mDescriptorSetInfo.find(descriptorWrite.dstSet);
3096                 if (ite == mDescriptorSetInfo.end()) {
3097                     continue;
3098                 }
3099                 DescriptorSetInfo& descriptorSetInfo = ite->second;
3100                 auto& table = descriptorSetInfo.allWrites;
3101                 VkDescriptorType descType = descriptorWrite.descriptorType;
3102                 uint32_t dstBinding = descriptorWrite.dstBinding;
3103                 uint32_t dstArrayElement = descriptorWrite.dstArrayElement;
3104                 uint32_t descriptorCount = descriptorWrite.descriptorCount;
3105 
3106                 uint32_t arrOffset = dstArrayElement;
3107 
3108                 if (isDescriptorTypeImageInfo(descType)) {
3109                     for (uint32_t writeElemIdx = 0; writeElemIdx < descriptorCount;
3110                          ++writeElemIdx, ++arrOffset) {
3111                         // Descriptor writes wrap to the next binding. See
3112                         // https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkWriteDescriptorSet.html
3113                         if (arrOffset >= table[dstBinding].size()) {
3114                             ++dstBinding;
3115                             arrOffset = 0;
3116                         }
3117                         auto& entry = table[dstBinding][arrOffset];
3118                         entry.imageInfo = descriptorWrite.pImageInfo[writeElemIdx];
3119                         entry.writeType = DescriptorSetInfo::DescriptorWriteType::ImageInfo;
3120                         entry.descriptorType = descType;
3121                         entry.alives.clear();
3122                         if (descriptorTypeContainsImage(descType)) {
3123                             auto* imageViewInfo =
3124                                 android::base::find(mImageViewInfo, entry.imageInfo.imageView);
3125                             if (imageViewInfo) {
3126                                 entry.alives.push_back(imageViewInfo->alive);
3127                             }
3128                         }
3129                         if (descriptorTypeContainsSampler(descType)) {
3130                             auto* samplerInfo =
3131                                 android::base::find(mSamplerInfo, entry.imageInfo.sampler);
3132                             if (samplerInfo) {
3133                                 entry.alives.push_back(samplerInfo->alive);
3134                             }
3135                         }
3136                     }
3137                 } else if (isDescriptorTypeBufferInfo(descType)) {
3138                     for (uint32_t writeElemIdx = 0; writeElemIdx < descriptorCount;
3139                          ++writeElemIdx, ++arrOffset) {
3140                         if (arrOffset >= table[dstBinding].size()) {
3141                             ++dstBinding;
3142                             arrOffset = 0;
3143                         }
3144                         auto& entry = table[dstBinding][arrOffset];
3145                         entry.bufferInfo = descriptorWrite.pBufferInfo[writeElemIdx];
3146                         entry.writeType = DescriptorSetInfo::DescriptorWriteType::BufferInfo;
3147                         entry.descriptorType = descType;
3148                         entry.alives.clear();
3149                         auto* bufferInfo =
3150                             android::base::find(mBufferInfo, entry.bufferInfo.buffer);
3151                         if (bufferInfo) {
3152                             entry.alives.push_back(bufferInfo->alive);
3153                         }
3154                     }
3155                 } else if (isDescriptorTypeBufferView(descType)) {
3156                     for (uint32_t writeElemIdx = 0; writeElemIdx < descriptorCount;
3157                          ++writeElemIdx, ++arrOffset) {
3158                         if (arrOffset >= table[dstBinding].size()) {
3159                             ++dstBinding;
3160                             arrOffset = 0;
3161                         }
3162                         auto& entry = table[dstBinding][arrOffset];
3163                         entry.bufferView = descriptorWrite.pTexelBufferView[writeElemIdx];
3164                         entry.writeType = DescriptorSetInfo::DescriptorWriteType::BufferView;
3165                         entry.descriptorType = descType;
3166                         // TODO: check alive
3167                         ERR("%s: Snapshot for texel buffer view is incomplete.\n", __func__);
3168                     }
3169                 } else if (isDescriptorTypeInlineUniformBlock(descType)) {
3170                     const VkWriteDescriptorSetInlineUniformBlock* descInlineUniformBlock =
3171                         static_cast<const VkWriteDescriptorSetInlineUniformBlock*>(
3172                             descriptorWrite.pNext);
3173                     while (descInlineUniformBlock &&
3174                            descInlineUniformBlock->sType !=
3175                                VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK) {
3176                         descInlineUniformBlock =
3177                             static_cast<const VkWriteDescriptorSetInlineUniformBlock*>(
3178                                 descInlineUniformBlock->pNext);
3179                     }
3180                     if (!descInlineUniformBlock) {
3181                         GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3182                             << __func__ << ": did not find inline uniform block";
3183                         return;
3184                     }
3185                     auto& entry = table[dstBinding][0];
3186                     entry.inlineUniformBlock = *descInlineUniformBlock;
3187                     entry.inlineUniformBlockBuffer.assign(
3188                         static_cast<const uint8_t*>(descInlineUniformBlock->pData),
3189                         static_cast<const uint8_t*>(descInlineUniformBlock->pData) +
3190                             descInlineUniformBlock->dataSize);
3191                     entry.writeType = DescriptorSetInfo::DescriptorWriteType::InlineUniformBlock;
3192                     entry.descriptorType = descType;
3193                     entry.dstArrayElement = dstArrayElement;
3194                 } else if (isDescriptorTypeAccelerationStructure(descType)) {
3195                     // TODO
3196                     // Look for pNext inline uniform block or acceleration structure.
3197                     // Append new DescriptorWrite entry that holds the buffer
3198                     ERR("%s: Ignoring Snapshot for emulated write for descriptor type 0x%x\n",
3199                         __func__, descType);
3200                 }
3201             }
3202             // TODO: bookkeep pDescriptorCopies
3203             // Our primary use case vkQueueCommitDescriptorSetUpdatesGOOGLE does not use
3204             // pDescriptorCopies. Thus skip its implementation for now.
3205             if (descriptorCopyCount) {
3206                 ERR("%s: Snapshot does not support descriptor copy yet\n", __func__);
3207             }
3208         }
3209         bool needEmulateWriteDescriptor = false;
3210         // C++ allows zero-size array allocations, so this works even when descriptorWriteCount is 0.
3211         std::unique_ptr<bool[]> descriptorWritesNeedDeepCopy(new bool[descriptorWriteCount]);
3212         for (uint32_t i = 0; i < descriptorWriteCount; i++) {
3213             const VkWriteDescriptorSet& descriptorWrite = pDescriptorWrites[i];
3214             descriptorWritesNeedDeepCopy[i] = false;
3215             if (!vk_util::vk_descriptor_type_has_image_view(descriptorWrite.descriptorType)) {
3216                 continue;
3217             }
3218             for (uint32_t j = 0; j < descriptorWrite.descriptorCount; j++) {
3219                 const VkDescriptorImageInfo& imageInfo = descriptorWrite.pImageInfo[j];
3220                 const auto* imgViewInfo = android::base::find(mImageViewInfo, imageInfo.imageView);
3221                 if (!imgViewInfo) {
3222                     continue;
3223                 }
3224                 if (imgViewInfo->boundColorBuffer) {
3225                     // TODO(igorc): Move this to vkQueueSubmit time.
3226                     // Likely can be removed after b/323596143
3227                     auto fb = FrameBuffer::getFB();
3228                     if (fb) {
3229                         fb->invalidateColorBufferForVk(*imgViewInfo->boundColorBuffer);
3230                     }
3231                 }
3232                 if (descriptorWrite.descriptorType != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) {
3233                     continue;
3234                 }
3235                 const auto* samplerInfo = android::base::find(mSamplerInfo, imageInfo.sampler);
3236                 if (samplerInfo && imgViewInfo->needEmulatedAlpha &&
3237                     samplerInfo->needEmulatedAlpha) {
3238                     needEmulateWriteDescriptor = true;
3239                     descriptorWritesNeedDeepCopy[i] = true;
3240                     break;
3241                 }
3242             }
3243         }
3244         if (!needEmulateWriteDescriptor) {
3245             vk->vkUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites,
3246                                        descriptorCopyCount, pDescriptorCopies);
3247             return;
3248         }
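        // At least one write needs the workaround: shallow-copy every write, deep-copy
        // the affected VkDescriptorImageInfo arrays, and swap in a lazily created
        // sampler whose transparent border color is replaced with an opaque one.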
3249         std::list<std::unique_ptr<VkDescriptorImageInfo[]>> imageInfoPool;
3250         std::unique_ptr<VkWriteDescriptorSet[]> descriptorWrites(
3251             new VkWriteDescriptorSet[descriptorWriteCount]);
3252         for (uint32_t i = 0; i < descriptorWriteCount; i++) {
3253             const VkWriteDescriptorSet& srcDescriptorWrite = pDescriptorWrites[i];
3254             VkWriteDescriptorSet& dstDescriptorWrite = descriptorWrites[i];
3255             // Shallow copy first
3256             dstDescriptorWrite = srcDescriptorWrite;
3257             if (!descriptorWritesNeedDeepCopy[i]) {
3258                 continue;
3259             }
3260             // Deep copy
3261             assert(dstDescriptorWrite.descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
3262             imageInfoPool.emplace_back(
3263                 new VkDescriptorImageInfo[dstDescriptorWrite.descriptorCount]);
3264             VkDescriptorImageInfo* imageInfos = imageInfoPool.back().get();
3265             memcpy(imageInfos, srcDescriptorWrite.pImageInfo,
3266                    dstDescriptorWrite.descriptorCount * sizeof(VkDescriptorImageInfo));
3267             dstDescriptorWrite.pImageInfo = imageInfos;
3268             for (uint32_t j = 0; j < dstDescriptorWrite.descriptorCount; j++) {
3269                 VkDescriptorImageInfo& imageInfo = imageInfos[j];
3270                 const auto* imgViewInfo = android::base::find(mImageViewInfo, imageInfo.imageView);
3271                 auto* samplerInfo = android::base::find(mSamplerInfo, imageInfo.sampler);
3272                 if (!imgViewInfo || !samplerInfo) continue;
3273                 if (imgViewInfo->needEmulatedAlpha && samplerInfo->needEmulatedAlpha) {
3274                     if (samplerInfo->emulatedborderSampler == VK_NULL_HANDLE) {
3275                         // create the emulated sampler
3276                         VkSamplerCreateInfo createInfo;
3277                         deepcopy_VkSamplerCreateInfo(pool, VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
3278                                                      &samplerInfo->createInfo, &createInfo);
3279                         switch (createInfo.borderColor) {
3280                             case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
3281                                 createInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
3282                                 break;
3283                             case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
3284                                 createInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
3285                                 break;
3286                             case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
3287                             case VK_BORDER_COLOR_INT_CUSTOM_EXT: {
3288                                 VkSamplerCustomBorderColorCreateInfoEXT*
3289                                     customBorderColorCreateInfo =
3290                                         vk_find_struct<VkSamplerCustomBorderColorCreateInfoEXT>(
3291                                             &createInfo);
3292                                 if (customBorderColorCreateInfo) {
3293                                     switch (createInfo.borderColor) {
3294                                         case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
3295                                             customBorderColorCreateInfo->customBorderColor
3296                                                 .float32[3] = 1.0f;
3297                                             break;
3298                                         case VK_BORDER_COLOR_INT_CUSTOM_EXT:
3299                                             customBorderColorCreateInfo->customBorderColor
3300                                                 .int32[3] = 128;
3301                                             break;
3302                                         default:
3303                                             break;
3304                                     }
3305                                 }
3306                                 break;
3307                             }
3308                             default:
3309                                 break;
3310                         }
3311                         vk->vkCreateSampler(device, &createInfo, nullptr,
3312                                             &samplerInfo->emulatedborderSampler);
3313                     }
3314                     imageInfo.sampler = samplerInfo->emulatedborderSampler;
3315                 }
3316             }
3317         }
3318         vk->vkUpdateDescriptorSets(device, descriptorWriteCount, descriptorWrites.get(),
3319                                    descriptorCopyCount, pDescriptorCopies);
3320     }
3321 
3322     VkResult on_vkCreateShaderModule(android::base::BumpPool* pool, VkDevice boxed_device,
3323                                      const VkShaderModuleCreateInfo* pCreateInfo,
3324                                      const VkAllocationCallbacks* pAllocator,
3325                                      VkShaderModule* pShaderModule) {
3326         auto device = unbox_VkDevice(boxed_device);
3327         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3328 
3329         VkResult result =
3330             deviceDispatch->vkCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
3331         if (result != VK_SUCCESS) {
3332             return result;
3333         }
3334 
3335         std::lock_guard<std::recursive_mutex> lock(mLock);
3336 
3337         auto& shaderModuleInfo = mShaderModuleInfo[*pShaderModule];
3338         shaderModuleInfo.device = device;
3339 
3340         *pShaderModule = new_boxed_non_dispatchable_VkShaderModule(*pShaderModule);
3341 
3342         return result;
3343     }
3344 
3345     void destroyShaderModuleLocked(VkDevice device, VulkanDispatch* deviceDispatch,
3346                                    VkShaderModule shaderModule,
3347                                    const VkAllocationCallbacks* pAllocator) {
3348         deviceDispatch->vkDestroyShaderModule(device, shaderModule, pAllocator);
3349 
3350         mShaderModuleInfo.erase(shaderModule);
3351     }
3352 
3353     void on_vkDestroyShaderModule(android::base::BumpPool* pool, VkDevice boxed_device,
3354                                   VkShaderModule shaderModule,
3355                                   const VkAllocationCallbacks* pAllocator) {
3356         auto device = unbox_VkDevice(boxed_device);
3357         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3358 
3359         std::lock_guard<std::recursive_mutex> lock(mLock);
3360         destroyShaderModuleLocked(device, deviceDispatch, shaderModule, pAllocator);
3361     }
3362 
3363     VkResult on_vkCreatePipelineCache(android::base::BumpPool* pool, VkDevice boxed_device,
3364                                       const VkPipelineCacheCreateInfo* pCreateInfo,
3365                                       const VkAllocationCallbacks* pAllocator,
3366                                       VkPipelineCache* pPipelineCache) {
3367         auto device = unbox_VkDevice(boxed_device);
3368         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3369 
3370         VkResult result =
3371             deviceDispatch->vkCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
3372         if (result != VK_SUCCESS) {
3373             return result;
3374         }
3375 
3376         std::lock_guard<std::recursive_mutex> lock(mLock);
3377 
3378         auto& pipelineCacheInfo = mPipelineCacheInfo[*pPipelineCache];
3379         pipelineCacheInfo.device = device;
3380 
3381         *pPipelineCache = new_boxed_non_dispatchable_VkPipelineCache(*pPipelineCache);
3382 
3383         return result;
3384     }
3385 
3386     void destroyPipelineCacheLocked(VkDevice device, VulkanDispatch* deviceDispatch,
3387                                     VkPipelineCache pipelineCache,
3388                                     const VkAllocationCallbacks* pAllocator) {
3389         deviceDispatch->vkDestroyPipelineCache(device, pipelineCache, pAllocator);
3390 
3391         mPipelineCacheInfo.erase(pipelineCache);
3392     }
3393 
3394     void on_vkDestroyPipelineCache(android::base::BumpPool* pool, VkDevice boxed_device,
3395                                    VkPipelineCache pipelineCache,
3396                                    const VkAllocationCallbacks* pAllocator) {
3397         auto device = unbox_VkDevice(boxed_device);
3398         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3399 
3400         std::lock_guard<std::recursive_mutex> lock(mLock);
3401         destroyPipelineCacheLocked(device, deviceDispatch, pipelineCache, pAllocator);
3402     }
3403 
3404     VkResult on_vkCreateGraphicsPipelines(android::base::BumpPool* pool, VkDevice boxed_device,
3405                                           VkPipelineCache pipelineCache, uint32_t createInfoCount,
3406                                           const VkGraphicsPipelineCreateInfo* pCreateInfos,
3407                                           const VkAllocationCallbacks* pAllocator,
3408                                           VkPipeline* pPipelines) {
3409         auto device = unbox_VkDevice(boxed_device);
3410         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3411 
3412         VkResult result = deviceDispatch->vkCreateGraphicsPipelines(
3413             device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
3414         if (result != VK_SUCCESS && result != VK_PIPELINE_COMPILE_REQUIRED) {
3415             return result;
3416         }
3417 
3418         std::lock_guard<std::recursive_mutex> lock(mLock);
3419 
3420         for (uint32_t i = 0; i < createInfoCount; i++) {
3421             if (!pPipelines[i]) {
3422                 continue;
3423             }
3424             auto& pipelineInfo = mPipelineInfo[pPipelines[i]];
3425             pipelineInfo.device = device;
3426 
3427             pPipelines[i] = new_boxed_non_dispatchable_VkPipeline(pPipelines[i]);
3428         }
3429 
3430         return result;
3431     }
3432 
3433     void destroyPipelineLocked(VkDevice device, VulkanDispatch* deviceDispatch, VkPipeline pipeline,
3434                                const VkAllocationCallbacks* pAllocator) {
3435         deviceDispatch->vkDestroyPipeline(device, pipeline, pAllocator);
3436 
3437         mPipelineInfo.erase(pipeline);
3438     }
3439 
3440     void on_vkDestroyPipeline(android::base::BumpPool* pool, VkDevice boxed_device,
3441                               VkPipeline pipeline, const VkAllocationCallbacks* pAllocator) {
3442         auto device = unbox_VkDevice(boxed_device);
3443         auto deviceDispatch = dispatch_VkDevice(boxed_device);
3444 
3445         std::lock_guard<std::recursive_mutex> lock(mLock);
3446         destroyPipelineLocked(device, deviceDispatch, pipeline, pAllocator);
3447     }
3448 
3449     void on_vkCmdCopyImage(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
3450                            VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage,
3451                            VkImageLayout dstImageLayout, uint32_t regionCount,
3452                            const VkImageCopy* pRegions) {
3453         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3454         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3455 
3456         std::lock_guard<std::recursive_mutex> lock(mLock);
3457         auto* srcImg = android::base::find(mImageInfo, srcImage);
3458         auto* dstImg = android::base::find(mImageInfo, dstImage);
3459         if (!srcImg || !dstImg) return;
3460 
3461         VkDevice device = srcImg->cmpInfo.device();
3462         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3463         if (!deviceInfo) return;
3464 
3465         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
3466         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
3467         if (!needEmulatedSrc && !needEmulatedDst) {
3468             vk->vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
3469                                regionCount, pRegions);
3470             return;
3471         }
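        // At least one side is an emulated compressed image: redirect each region to the
        // per-mip-level backing image and rewrite the copy parameters accordingly.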
3472         VkImage srcImageMip = srcImage;
3473         VkImage dstImageMip = dstImage;
3474         for (uint32_t r = 0; r < regionCount; r++) {
3475             if (needEmulatedSrc) {
3476                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pRegions[r].srcSubresource.mipLevel);
3477             }
3478             if (needEmulatedDst) {
3479                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pRegions[r].dstSubresource.mipLevel);
3480             }
3481             VkImageCopy region = CompressedImageInfo::getCompressedMipmapsImageCopy(
3482                 pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
3483             vk->vkCmdCopyImage(commandBuffer, srcImageMip, srcImageLayout, dstImageMip,
3484                                dstImageLayout, 1, &region);
3485         }
3486     }
3487 
3488     void on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
3489                                    VkCommandBuffer boxed_commandBuffer, VkImage srcImage,
3490                                    VkImageLayout srcImageLayout, VkBuffer dstBuffer,
3491                                    uint32_t regionCount, const VkBufferImageCopy* pRegions) {
3492         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3493         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3494 
3495         std::lock_guard<std::recursive_mutex> lock(mLock);
3496         auto* imageInfo = android::base::find(mImageInfo, srcImage);
3497         auto* bufferInfo = android::base::find(mBufferInfo, dstBuffer);
3498         if (!imageInfo || !bufferInfo) return;
3499         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
3500         if (!deviceInfo) return;
3501         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
3502         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
3503             vk->vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer,
3504                                        regionCount, pRegions);
3505             return;
3506         }
3507         for (uint32_t r = 0; r < regionCount; r++) {
3508             uint32_t mipLevel = pRegions[r].imageSubresource.mipLevel;
3509             VkBufferImageCopy region = cmpInfo.getBufferImageCopy(pRegions[r]);
3510             vk->vkCmdCopyImageToBuffer(commandBuffer, cmpInfo.compressedMipmap(mipLevel),
3511                                        srcImageLayout, dstBuffer, 1, &region);
3512         }
3513     }
3514 
3515     void on_vkCmdCopyImage2(android::base::BumpPool* pool,
3516                            VkCommandBuffer boxed_commandBuffer,
3517                            const VkCopyImageInfo2* pCopyImageInfo) {
3518         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3519         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3520 
3521         std::lock_guard<std::recursive_mutex> lock(mLock);
3522         auto* srcImg = android::base::find(mImageInfo, pCopyImageInfo->srcImage);
3523         auto* dstImg = android::base::find(mImageInfo, pCopyImageInfo->dstImage);
3524         if (!srcImg || !dstImg) return;
3525 
3526         VkDevice device = srcImg->cmpInfo.device();
3527         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3528         if (!deviceInfo) return;
3529 
3530         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
3531         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
3532         if (!needEmulatedSrc && !needEmulatedDst) {
3533             vk->vkCmdCopyImage2(commandBuffer, pCopyImageInfo);
3534             return;
3535         }
3536         VkImage srcImageMip = pCopyImageInfo->srcImage;
3537         VkImage dstImageMip = pCopyImageInfo->dstImage;
3538         for (uint32_t r = 0; r < pCopyImageInfo->regionCount; r++) {
3539             if (needEmulatedSrc) {
3540                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].srcSubresource.mipLevel);
3541             }
3542             if (needEmulatedDst) {
3543                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].dstSubresource.mipLevel);
3544             }
3545 
3546             VkCopyImageInfo2 inf2 = *pCopyImageInfo;
3547             inf2.regionCount = 1;
3548             inf2.srcImage = srcImageMip;
3549             inf2.dstImage = dstImageMip;
3550 
3551             VkImageCopy2 region = CompressedImageInfo::getCompressedMipmapsImageCopy(
3552                 pCopyImageInfo->pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
3553             inf2.pRegions = &region;
3554 
3555             vk->vkCmdCopyImage2(commandBuffer, &inf2);
3556         }
3557     }
3558 
3559     void on_vkCmdCopyImageToBuffer2(android::base::BumpPool* pool,
3560                                    VkCommandBuffer boxed_commandBuffer,
3561                                    const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo) {
3562         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3563         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3564 
3565         std::lock_guard<std::recursive_mutex> lock(mLock);
3566         auto* imageInfo = android::base::find(mImageInfo, pCopyImageToBufferInfo->srcImage);
3567         auto* bufferInfo = android::base::find(mBufferInfo, pCopyImageToBufferInfo->dstBuffer);
3568         if (!imageInfo || !bufferInfo) return;
3569         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
3570         if (!deviceInfo) return;
3571         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
3572         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
3573             vk->vkCmdCopyImageToBuffer2(commandBuffer, pCopyImageToBufferInfo);
3574             return;
3575         }
3576         for (uint32_t r = 0; r < pCopyImageToBufferInfo->regionCount; r++) {
3577             uint32_t mipLevel = pCopyImageToBufferInfo->pRegions[r].imageSubresource.mipLevel;
3578             VkBufferImageCopy2 region = cmpInfo.getBufferImageCopy(pCopyImageToBufferInfo->pRegions[r]);
3579             VkCopyImageToBufferInfo2 inf = *pCopyImageToBufferInfo;
3580             inf.regionCount = 1;
3581             inf.pRegions = &region;
3582             inf.srcImage = cmpInfo.compressedMipmap(mipLevel);
3583 
3584             vk->vkCmdCopyImageToBuffer2(commandBuffer, &inf);
3585         }
3586     }
3587 
3588     void on_vkCmdCopyImage2KHR(android::base::BumpPool* pool,
3589                            VkCommandBuffer boxed_commandBuffer,
3590                            const VkCopyImageInfo2KHR* pCopyImageInfo) {
3591         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3592         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3593 
3594         std::lock_guard<std::recursive_mutex> lock(mLock);
3595         auto* srcImg = android::base::find(mImageInfo, pCopyImageInfo->srcImage);
3596         auto* dstImg = android::base::find(mImageInfo, pCopyImageInfo->dstImage);
3597         if (!srcImg || !dstImg) return;
3598 
3599         VkDevice device = srcImg->cmpInfo.device();
3600         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3601         if (!deviceInfo) return;
3602 
3603         bool needEmulatedSrc = deviceInfo->needEmulatedDecompression(srcImg->cmpInfo);
3604         bool needEmulatedDst = deviceInfo->needEmulatedDecompression(dstImg->cmpInfo);
3605         if (!needEmulatedSrc && !needEmulatedDst) {
3606             vk->vkCmdCopyImage2KHR(commandBuffer, pCopyImageInfo);
3607             return;
3608         }
3609         VkImage srcImageMip = pCopyImageInfo->srcImage;
3610         VkImage dstImageMip = pCopyImageInfo->dstImage;
3611         for (uint32_t r = 0; r < pCopyImageInfo->regionCount; r++) {
3612             if (needEmulatedSrc) {
3613                 srcImageMip = srcImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].srcSubresource.mipLevel);
3614             }
3615             if (needEmulatedDst) {
3616                 dstImageMip = dstImg->cmpInfo.compressedMipmap(pCopyImageInfo->pRegions[r].dstSubresource.mipLevel);
3617             }
3618 
3619             VkCopyImageInfo2KHR inf2 = *pCopyImageInfo;
3620             inf2.regionCount = 1;
3621             inf2.srcImage = srcImageMip;
3622             inf2.dstImage = dstImageMip;
3623 
3624             VkImageCopy2KHR region = CompressedImageInfo::getCompressedMipmapsImageCopy(
3625                 pCopyImageInfo->pRegions[r], srcImg->cmpInfo, dstImg->cmpInfo, needEmulatedSrc, needEmulatedDst);
3626             inf2.pRegions = &region;
3627 
3628             vk->vkCmdCopyImage2KHR(commandBuffer, &inf2);
3629         }
3630     }
3631 
3632     void on_vkCmdCopyImageToBuffer2KHR(android::base::BumpPool* pool,
3633                                    VkCommandBuffer boxed_commandBuffer,
3634                                    const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) {
3635         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3636         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3637 
3638         std::lock_guard<std::recursive_mutex> lock(mLock);
3639         auto* imageInfo = android::base::find(mImageInfo, pCopyImageToBufferInfo->srcImage);
3640         auto* bufferInfo = android::base::find(mBufferInfo, pCopyImageToBufferInfo->dstBuffer);
3641         if (!imageInfo || !bufferInfo) return;
3642         auto* deviceInfo = android::base::find(mDeviceInfo, bufferInfo->device);
3643         if (!deviceInfo) return;
3644         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
3645         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
3646             vk->vkCmdCopyImageToBuffer2KHR(commandBuffer, pCopyImageToBufferInfo);
3647             return;
3648         }
3649         for (uint32_t r = 0; r < pCopyImageToBufferInfo->regionCount; r++) {
3650             uint32_t mipLevel = pCopyImageToBufferInfo->pRegions[r].imageSubresource.mipLevel;
3651             VkBufferImageCopy2KHR region = cmpInfo.getBufferImageCopy(pCopyImageToBufferInfo->pRegions[r]);
3652             VkCopyImageToBufferInfo2KHR inf = *pCopyImageToBufferInfo;
3653             inf.regionCount = 1;
3654             inf.pRegions = &region;
3655             inf.srcImage = cmpInfo.compressedMipmap(mipLevel);
3656 
3657             vk->vkCmdCopyImageToBuffer2KHR(commandBuffer, &inf);
3658         }
3659     }
3660 
3661     void on_vkGetImageMemoryRequirements(android::base::BumpPool* pool, VkDevice boxed_device,
3662                                          VkImage image, VkMemoryRequirements* pMemoryRequirements) {
3663         auto device = unbox_VkDevice(boxed_device);
3664         auto vk = dispatch_VkDevice(boxed_device);
3665         vk->vkGetImageMemoryRequirements(device, image, pMemoryRequirements);
3666         std::lock_guard<std::recursive_mutex> lock(mLock);
3667         updateImageMemorySizeLocked(device, image, pMemoryRequirements);
3668 
3669         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
3670         if (!physicalDevice) {
3671             ERR("Failed to find physical device for device:%p", device);
3672             return;
3673         }
3674 
3675         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
3676         if (!physicalDeviceInfo) {
3677             ERR("Failed to find physical device info for physical device:%p", *physicalDevice);
3678             return;
3679         }
3680 
3681         auto& physicalDeviceMemHelper = physicalDeviceInfo->memoryPropertiesHelper;
3682         physicalDeviceMemHelper->transformToGuestMemoryRequirements(pMemoryRequirements);
3683     }
3684 
3685     void on_vkGetImageMemoryRequirements2(android::base::BumpPool* pool, VkDevice boxed_device,
3686                                           const VkImageMemoryRequirementsInfo2* pInfo,
3687                                           VkMemoryRequirements2* pMemoryRequirements) {
3688         auto device = unbox_VkDevice(boxed_device);
3689         auto vk = dispatch_VkDevice(boxed_device);
3690 
3691         std::lock_guard<std::recursive_mutex> lock(mLock);
3692 
3693         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
3694         if (!physicalDevice) {
3695             ERR("Failed to find physical device for device:%p", device);
3696             return;
3697         }
3698 
3699         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
3700         if (!physicalDeviceInfo) {
3701             ERR("Failed to find physical device info for physical device:%p", *physicalDevice);
3702             return;
3703         }
3704 
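        // Prefer the core Vulkan 1.1 entry point, then the KHR extension, and only then
        // fall back to the legacy query, which cannot honor pNext extension structs.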
3705         if ((physicalDeviceInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
3706             vk->vkGetImageMemoryRequirements2) {
3707             vk->vkGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
3708         } else if (hasDeviceExtension(device, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
3709             vk->vkGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
3710         } else {
3711             if (pInfo->pNext) {
3712                 ERR("Warning: trying to use extension struct in VkMemoryRequirements2 without "
3713                     "having enabled the extension!");
3714             }
3715 
3716             vk->vkGetImageMemoryRequirements(device, pInfo->image,
3717                                              &pMemoryRequirements->memoryRequirements);
3718         }
3719 
3720         updateImageMemorySizeLocked(device, pInfo->image, &pMemoryRequirements->memoryRequirements);
3721 
3722         auto& physicalDeviceMemHelper = physicalDeviceInfo->memoryPropertiesHelper;
3723         physicalDeviceMemHelper->transformToGuestMemoryRequirements(
3724             &pMemoryRequirements->memoryRequirements);
3725     }
3726 
3727     void on_vkGetBufferMemoryRequirements(android::base::BumpPool* pool, VkDevice boxed_device,
3728                                           VkBuffer buffer,
3729                                           VkMemoryRequirements* pMemoryRequirements) {
3730         auto device = unbox_VkDevice(boxed_device);
3731         auto vk = dispatch_VkDevice(boxed_device);
3732         vk->vkGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
3733 
3734         std::lock_guard<std::recursive_mutex> lock(mLock);
3735 
3736         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
3737         if (!physicalDevice) {
3738             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3739                 << "No physical device available for " << device;
3740         }
3741 
3742         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
3743         if (!physicalDeviceInfo) {
3744             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3745                 << "No physical device info available for " << *physicalDevice;
3746         }
3747 
3748         auto& physicalDeviceMemHelper = physicalDeviceInfo->memoryPropertiesHelper;
3749         physicalDeviceMemHelper->transformToGuestMemoryRequirements(pMemoryRequirements);
3750     }
3751 
3752     void on_vkGetBufferMemoryRequirements2(android::base::BumpPool* pool, VkDevice boxed_device,
3753                                            const VkBufferMemoryRequirementsInfo2* pInfo,
3754                                            VkMemoryRequirements2* pMemoryRequirements) {
3755         auto device = unbox_VkDevice(boxed_device);
3756         auto vk = dispatch_VkDevice(boxed_device);
3757 
3758         std::lock_guard<std::recursive_mutex> lock(mLock);
3759 
3760         auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
3761         if (!physicalDevice) {
3762             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3763                 << "No physical device available for " << device;
3764         }
3765 
3766         auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
3767         if (!physicalDeviceInfo) {
3768             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
3769                 << "No physical device info available for " << *physicalDevice;
3770         }
3771 
3772         if ((physicalDeviceInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
3773             vk->vkGetBufferMemoryRequirements2) {
3774             vk->vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
3775         } else if (hasDeviceExtension(device, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME)) {
3776             vk->vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
3777         } else {
3778             if (pInfo->pNext) {
3779                 ERR("Warning: trying to use extension struct in VkMemoryRequirements2 without "
3780                     "having enabled the extension!");
3781             }
3782 
3783             vk->vkGetBufferMemoryRequirements(device, pInfo->buffer,
3784                                               &pMemoryRequirements->memoryRequirements);
3785         }
3786 
3787         auto& physicalDeviceMemHelper = physicalDeviceInfo->memoryPropertiesHelper;
3788         physicalDeviceMemHelper->transformToGuestMemoryRequirements(
3789             &pMemoryRequirements->memoryRequirements);
3790     }
3791 
3792     void on_vkCmdCopyBufferToImage(android::base::BumpPool* pool,
3793                                    VkCommandBuffer boxed_commandBuffer, VkBuffer srcBuffer,
3794                                    VkImage dstImage, VkImageLayout dstImageLayout,
3795                                    uint32_t regionCount, const VkBufferImageCopy* pRegions,
3796                                    const VkDecoderContext& context) {
3797         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3798         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3799 
3800         std::lock_guard<std::recursive_mutex> lock(mLock);
3801         auto* imageInfo = android::base::find(mImageInfo, dstImage);
3802         if (!imageInfo) return;
3803         auto* bufferInfo = android::base::find(mBufferInfo, srcBuffer);
3804         if (!bufferInfo) {
3805             return;
3806         }
3807         VkDevice device = bufferInfo->device;
3808         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3809         if (!deviceInfo) {
3810             return;
3811         }
3812         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
3813             vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout,
3814                                        regionCount, pRegions);
3815             return;
3816         }
3817         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
3818         if (!cmdBufferInfo) {
3819             return;
3820         }
3821         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
3822 
3823         for (uint32_t r = 0; r < regionCount; r++) {
3824             uint32_t mipLevel = pRegions[r].imageSubresource.mipLevel;
3825             VkBufferImageCopy region = cmpInfo.getBufferImageCopy(pRegions[r]);
3826             vk->vkCmdCopyBufferToImage(commandBuffer, srcBuffer, cmpInfo.compressedMipmap(mipLevel),
3827                                        dstImageLayout, 1, &region);
3828         }
3829 
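        // If this image can also be decompressed on the CPU (ASTC), locate the host
        // mapping of the source buffer and run the CPU decoder over the same regions.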
3830         if (cmpInfo.canDecompressOnCpu()) {
3831             // Get a pointer to the compressed image memory
3832             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
3833             if (!memoryInfo) {
3834                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
3835                 return;
3836             }
3837             if (!memoryInfo->ptr) {
3838                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
3839                 return;
3840             }
3841             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
3842             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, dstImage,
3843                                     dstImageLayout, regionCount, pRegions, context);
3844         }
3845     }
3846 
3847     void on_vkCmdCopyBufferToImage2(android::base::BumpPool* pool,
3848                                     VkCommandBuffer boxed_commandBuffer,
3849                                     const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo,
3850                                     const VkDecoderContext& context) {
3851         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3852         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3853 
3854         std::lock_guard<std::recursive_mutex> lock(mLock);
3855         auto* imageInfo = android::base::find(mImageInfo, pCopyBufferToImageInfo->dstImage);
3856         if (!imageInfo) return;
3857         auto* bufferInfo = android::base::find(mBufferInfo, pCopyBufferToImageInfo->srcBuffer);
3858         if (!bufferInfo) {
3859             return;
3860         }
3861         VkDevice device = bufferInfo->device;
3862         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3863         if (!deviceInfo) {
3864             return;
3865         }
3866         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
3867             vk->vkCmdCopyBufferToImage2(commandBuffer, pCopyBufferToImageInfo);
3868             return;
3869         }
3870         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
3871         if (!cmdBufferInfo) {
3872             return;
3873         }
3874         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
3875 
3876         for (uint32_t r = 0; r < pCopyBufferToImageInfo->regionCount; r++) {
3877             VkCopyBufferToImageInfo2 inf = *pCopyBufferToImageInfo;
3878             uint32_t mipLevel = pCopyBufferToImageInfo->pRegions[r].imageSubresource.mipLevel;
3879             inf.dstImage = cmpInfo.compressedMipmap(mipLevel);
3880             VkBufferImageCopy2 region = cmpInfo.getBufferImageCopy(pCopyBufferToImageInfo->pRegions[r]);
3881             inf.regionCount = 1;
3882             inf.pRegions = &region;
3883 
3884             vk->vkCmdCopyBufferToImage2(commandBuffer, &inf);
3885         }
3886 
3887         if (cmpInfo.canDecompressOnCpu()) {
3888             // Get a pointer to the compressed image memory
3889             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
3890             if (!memoryInfo) {
3891                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
3892                 return;
3893             }
3894             if (!memoryInfo->ptr) {
3895                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
3896                 return;
3897             }
3898             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
3899 
3900             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, pCopyBufferToImageInfo, context);
3901         }
3902     }
3903 
3904     void on_vkCmdCopyBufferToImage2KHR(android::base::BumpPool* pool,
3905                                     VkCommandBuffer boxed_commandBuffer,
3906                                     const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo,
3907                                     const VkDecoderContext& context) {
3908         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3909         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3910 
3911         std::lock_guard<std::recursive_mutex> lock(mLock);
3912         auto* imageInfo = android::base::find(mImageInfo, pCopyBufferToImageInfo->dstImage);
3913         if (!imageInfo) return;
3914         auto* bufferInfo = android::base::find(mBufferInfo, pCopyBufferToImageInfo->srcBuffer);
3915         if (!bufferInfo) {
3916             return;
3917         }
3918         VkDevice device = bufferInfo->device;
3919         auto* deviceInfo = android::base::find(mDeviceInfo, device);
3920         if (!deviceInfo) {
3921             return;
3922         }
3923         if (!deviceInfo->needEmulatedDecompression(imageInfo->cmpInfo)) {
3924             vk->vkCmdCopyBufferToImage2KHR(commandBuffer, pCopyBufferToImageInfo);
3925             return;
3926         }
3927         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
3928         if (!cmdBufferInfo) {
3929             return;
3930         }
3931         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
3932 
3933         for (uint32_t r = 0; r < pCopyBufferToImageInfo->regionCount; r++) {
3934             VkCopyBufferToImageInfo2KHR inf = *pCopyBufferToImageInfo;
3935             uint32_t mipLevel = pCopyBufferToImageInfo->pRegions[r].imageSubresource.mipLevel;
3936             inf.dstImage = cmpInfo.compressedMipmap(mipLevel);
3937             VkBufferImageCopy2KHR region = cmpInfo.getBufferImageCopy(pCopyBufferToImageInfo->pRegions[r]);
3938             inf.regionCount = 1;
3939             inf.pRegions = &region;
3940 
3941             vk->vkCmdCopyBufferToImage2KHR(commandBuffer, &inf);
3942         }
3943 
3944         if (cmpInfo.canDecompressOnCpu()) {
3945             // Get a pointer to the compressed image memory
3946             const MemoryInfo* memoryInfo = android::base::find(mMemoryInfo, bufferInfo->memory);
3947             if (!memoryInfo) {
3948                 WARN("ASTC CPU decompression: couldn't find mapped memory info");
3949                 return;
3950             }
3951             if (!memoryInfo->ptr) {
3952                 WARN("ASTC CPU decompression: VkBuffer memory isn't host-visible");
3953                 return;
3954             }
3955             uint8_t* astcData = (uint8_t*)(memoryInfo->ptr) + bufferInfo->memoryOffset;
3956 
3957             cmpInfo.decompressOnCpu(commandBuffer, astcData, bufferInfo->size, pCopyBufferToImageInfo, context);
3958         }
3959     }
3960 
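    // Barriers recorded by the guest may reference VK_QUEUE_FAMILY_FOREIGN_EXT; rewrite
    // such queue family ownership transfers to VK_QUEUE_FAMILY_EXTERNAL before they are
    // forwarded to the host driver.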
3961     inline void convertQueueFamilyForeignToExternal(uint32_t* queueFamilyIndexPtr) {
3962         if (*queueFamilyIndexPtr == VK_QUEUE_FAMILY_FOREIGN_EXT) {
3963             *queueFamilyIndexPtr = VK_QUEUE_FAMILY_EXTERNAL;
3964         }
3965     }
3966 
3967     inline void convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
3968         VkBufferMemoryBarrier* barrier) {
3969         convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
3970         convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
3971     }
3972 
3973     inline void convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
3974         VkImageMemoryBarrier* barrier) {
3975         convertQueueFamilyForeignToExternal(&barrier->srcQueueFamilyIndex);
3976         convertQueueFamilyForeignToExternal(&barrier->dstQueueFamilyIndex);
3977     }
3978 
3979     void on_vkCmdPipelineBarrier(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
3980                                  VkPipelineStageFlags srcStageMask,
3981                                  VkPipelineStageFlags dstStageMask,
3982                                  VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount,
3983                                  const VkMemoryBarrier* pMemoryBarriers,
3984                                  uint32_t bufferMemoryBarrierCount,
3985                                  const VkBufferMemoryBarrier* pBufferMemoryBarriers,
3986                                  uint32_t imageMemoryBarrierCount,
3987                                  const VkImageMemoryBarrier* pImageMemoryBarriers) {
3988         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
3989         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
3990 
3991         for (uint32_t i = 0; i < bufferMemoryBarrierCount; ++i) {
3992             convertQueueFamilyForeignToExternal_VkBufferMemoryBarrier(
3993                 ((VkBufferMemoryBarrier*)pBufferMemoryBarriers) + i);
3994         }
3995 
3996         for (uint32_t i = 0; i < imageMemoryBarrierCount; ++i) {
3997             convertQueueFamilyForeignToExternal_VkImageMemoryBarrier(
3998                 ((VkImageMemoryBarrier*)pImageMemoryBarriers) + i);
3999         }
4000 
4001         if (imageMemoryBarrierCount == 0) {
4002             vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
4003                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
4004                                      pBufferMemoryBarriers, imageMemoryBarrierCount,
4005                                      pImageMemoryBarriers);
4006             return;
4007         }
4008         std::lock_guard<std::recursive_mutex> lock(mLock);
4009         CommandBufferInfo* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
4010         if (!cmdBufferInfo) return;
4011 
4012         DeviceInfo* deviceInfo = android::base::find(mDeviceInfo, cmdBufferInfo->device);
4013         if (!deviceInfo) return;
4014 
4015         // TODO: update image layout in ImageInfo
4016         for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
4017             const VkImageMemoryBarrier& barrier = pImageMemoryBarriers[i];
4018             auto* imageInfo = android::base::find(mImageInfo, barrier.image);
4019             if (!imageInfo) {
4020                 continue;
4021             }
4022             cmdBufferInfo->imageLayouts[barrier.image] = barrier.newLayout;
4023             if (!imageInfo->boundColorBuffer.has_value()) {
4024                 continue;
4025             }
4026             HandleType cb = imageInfo->boundColorBuffer.value();
4027             if (barrier.srcQueueFamilyIndex == VK_QUEUE_FAMILY_EXTERNAL) {
4028                 cmdBufferInfo->acquiredColorBuffers.insert(cb);
4029             }
4030             if (barrier.dstQueueFamilyIndex == VK_QUEUE_FAMILY_EXTERNAL) {
4031                 cmdBufferInfo->releasedColorBuffers.insert(cb);
4032             }
4033             cmdBufferInfo->cbLayouts[cb] = barrier.newLayout;
4034         }
4035 
4036         if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
4037             vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
4038                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
4039                                      pBufferMemoryBarriers, imageMemoryBarrierCount,
4040                                      pImageMemoryBarriers);
4041             return;
4042         }
4043 
4044         // This is a compressed image. Handle decompression before calling vkCmdPipelineBarrier
4045 
4046         std::vector<VkImageMemoryBarrier> imageBarriers;
4047         bool needRebind = false;
4048 
4049         for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
4050             const VkImageMemoryBarrier& srcBarrier = pImageMemoryBarriers[i];
4051             auto* imageInfo = android::base::find(mImageInfo, srcBarrier.image);
4052 
4053             // If the image doesn't need GPU decompression, nothing to do.
4054             if (!imageInfo || !deviceInfo->needGpuDecompression(imageInfo->cmpInfo)) {
4055                 imageBarriers.push_back(srcBarrier);
4056                 continue;
4057             }
4058 
4059             // Otherwise, decompress the image, if we're going to read from it.
4060             needRebind |= imageInfo->cmpInfo.decompressIfNeeded(
4061                 vk, commandBuffer, srcStageMask, dstStageMask, srcBarrier, imageBarriers);
4062         }
4063 
4064         if (needRebind && cmdBufferInfo->computePipeline) {
4065             // Recover pipeline bindings
4066             // TODO(gregschlom): instead of doing this here again and again after each image we
4067             // decompress, could we do it once before calling vkCmdDispatch?
4068             vk->vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE,
4069                                   cmdBufferInfo->computePipeline);
4070             if (!cmdBufferInfo->descriptorSets.empty()) {
4071                 vk->vkCmdBindDescriptorSets(
4072                     commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, cmdBufferInfo->descriptorLayout,
4073                     cmdBufferInfo->firstSet, cmdBufferInfo->descriptorSets.size(),
4074                     cmdBufferInfo->descriptorSets.data(), cmdBufferInfo->dynamicOffsets.size(),
4075                     cmdBufferInfo->dynamicOffsets.data());
4076             }
4077         }
4078 
4079         // Apply the remaining barriers
4080         if (memoryBarrierCount || bufferMemoryBarrierCount || !imageBarriers.empty()) {
4081             vk->vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
4082                                      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
4083                                      pBufferMemoryBarriers, imageBarriers.size(),
4084                                      imageBarriers.data());
4085         }
4086     }
4087 
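    // Maps an already host-mapped VkDeviceMemory into guest physical address space: the
    // host virtual address is page-aligned, the size is rounded up to whole pages, and
    // the range is registered with the VM as user-backed RAM, replacing any previous
    // mapping at the same guest physical address.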
4088     bool mapHostVisibleMemoryToGuestPhysicalAddressLocked(VulkanDispatch* vk, VkDevice device,
4089                                                           VkDeviceMemory memory,
4090                                                           uint64_t physAddr) {
4091         if (!m_emu->features.GlDirectMem.enabled &&
4092             !m_emu->features.VirtioGpuNext.enabled) {
4093             // fprintf(stderr, "%s: Tried to use direct mapping "
4094             // "while GlDirectMem is not enabled!\n");
4095         }
4096 
4097         auto* info = android::base::find(mMemoryInfo, memory);
4098         if (!info) return false;
4099 
4100         info->guestPhysAddr = physAddr;
4101 
4102         constexpr size_t kPageBits = 12;
4103         constexpr size_t kPageSize = 1u << kPageBits;
4104         constexpr size_t kPageOffsetMask = kPageSize - 1;
4105 
4106         uintptr_t addr = reinterpret_cast<uintptr_t>(info->ptr);
4107         uintptr_t pageOffset = addr & kPageOffsetMask;
4108 
4109         info->pageAlignedHva = reinterpret_cast<void*>(addr - pageOffset);
4110         info->sizeToPage = ((info->size + pageOffset + kPageSize - 1) >> kPageBits) << kPageBits;
4111 
4112         if (mLogging) {
4113             fprintf(stderr, "%s: map: %p, %p -> [0x%llx 0x%llx]\n", __func__, info->ptr,
4114                     info->pageAlignedHva, (unsigned long long)info->guestPhysAddr,
4115                     (unsigned long long)info->guestPhysAddr + info->sizeToPage);
4116         }
4117 
4118         info->directMapped = true;
4119         uint64_t gpa = info->guestPhysAddr;
4120         void* hva = info->pageAlignedHva;
4121         size_t sizeToPage = info->sizeToPage;
4122 
4123         AutoLock occupiedGpasLock(mOccupiedGpasLock);
4124 
4125         auto* existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
4126         if (existingMemoryInfo) {
4127             fprintf(stderr, "%s: WARNING: already mapped gpa 0x%llx, replacing\n", __func__,
4128                     (unsigned long long)gpa);
4129 
4130             get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
4131                                                          existingMemoryInfo->sizeToPage);
4132 
4133             mOccupiedGpas.erase(gpa);
4134         }
4135 
4136         get_emugl_vm_operations().mapUserBackedRam(gpa, hva, sizeToPage);
4137 
4138         if (mVerbosePrints) {
4139             fprintf(stderr, "VERBOSE:%s: registering gpa 0x%llx to mOccupiedGpas\n", __func__,
4140                     (unsigned long long)gpa);
4141         }
4142 
4143         mOccupiedGpas[gpa] = {
4144             vk, device, memory, gpa, sizeToPage,
4145         };
4146 
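        // Unless the legacy cleanup path is in use, ask the address space device to call
        // back when the guest deallocates this GPA so the RAM mapping can be removed.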
4147         if (!mUseOldMemoryCleanupPath) {
4148             get_emugl_address_space_device_control_ops().register_deallocation_callback(
4149                 this, gpa, [](void* thisPtr, uint64_t gpa) {
4150                     Impl* implPtr = (Impl*)thisPtr;
4151                     implPtr->unmapMemoryAtGpaIfExists(gpa);
4152                 });
4153         }
4154 
4155         return true;
4156     }
4157 
4158     // Only call this from the address space device deallocation operation's
4159     // context, or it's possible that the guest/host view of which gpa's are
4160     // occupied goes out of sync.
4161     void unmapMemoryAtGpaIfExists(uint64_t gpa) {
4162         AutoLock lock(mOccupiedGpasLock);
4163 
4164         if (mVerbosePrints) {
4165             fprintf(stderr, "VERBOSE:%s: deallocation callback for gpa 0x%llx\n", __func__,
4166                     (unsigned long long)gpa);
4167         }
4168 
4169         auto* existingMemoryInfo = android::base::find(mOccupiedGpas, gpa);
4170         if (!existingMemoryInfo) return;
4171 
4172         get_emugl_vm_operations().unmapUserBackedRam(existingMemoryInfo->gpa,
4173                                                      existingMemoryInfo->sizeToPage);
4174 
4175         mOccupiedGpas.erase(gpa);
4176     }
4177 
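    // Handles guest allocations, including the Gfxstream-specific pNext structs
    // (VkImportColorBufferGOOGLE, VkImportBufferGOOGLE, VkCreateBlobGOOGLE) used to
    // import ColorBuffer/Buffer backing memory or carry blob creation parameters.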
4178     VkResult on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice boxed_device,
4179                                  const VkMemoryAllocateInfo* pAllocateInfo,
4180                                  const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory) {
4181         auto device = unbox_VkDevice(boxed_device);
4182         auto vk = dispatch_VkDevice(boxed_device);
4183         auto* tInfo = RenderThreadInfoVk::get();
4184 
4185         if (!pAllocateInfo) return VK_ERROR_INITIALIZATION_FAILED;
4186 
4187         VkMemoryAllocateInfo localAllocInfo = vk_make_orphan_copy(*pAllocateInfo);
4188         vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localAllocInfo);
4189 
4190         VkMemoryAllocateFlagsInfo allocFlagsInfo;
4191         VkMemoryOpaqueCaptureAddressAllocateInfo opaqueCaptureAddressAllocInfo;
4192 
4193         const VkMemoryAllocateFlagsInfo* allocFlagsInfoPtr =
4194             vk_find_struct<VkMemoryAllocateFlagsInfo>(pAllocateInfo);
4195         const VkMemoryOpaqueCaptureAddressAllocateInfo* opaqueCaptureAddressAllocInfoPtr =
4196             vk_find_struct<VkMemoryOpaqueCaptureAddressAllocateInfo>(pAllocateInfo);
4197 
4198         if (allocFlagsInfoPtr) {
4199             allocFlagsInfo = *allocFlagsInfoPtr;
4200             vk_append_struct(&structChainIter, &allocFlagsInfo);
4201         }
4202 
4203         if (opaqueCaptureAddressAllocInfoPtr) {
4204             opaqueCaptureAddressAllocInfo = *opaqueCaptureAddressAllocInfoPtr;
4205             vk_append_struct(&structChainIter, &opaqueCaptureAddressAllocInfo);
4206         }
4207 
4208         const VkMemoryDedicatedAllocateInfo* dedicatedAllocInfoPtr =
4209             vk_find_struct<VkMemoryDedicatedAllocateInfo>(pAllocateInfo);
4210         VkMemoryDedicatedAllocateInfo localDedicatedAllocInfo;
4211 
4212         if (dedicatedAllocInfoPtr) {
4213             localDedicatedAllocInfo = vk_make_orphan_copy(*dedicatedAllocInfoPtr);
4214         }
4215         if (!usingDirectMapping()) {
4216             // We copy bytes 1 page at a time from the guest to the host
4217             // if we are not using direct mapping. This means we can end up
4218             // writing over memory we did not intend.
4219             // E.g. SwiftShader just allocates with malloc, which can have
4220             // data stored between allocations.
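                 // For example, with a 4096-byte page size a request of 5000 bytes becomes
                 // (5000 + 4096) & ~4095 = 8192 bytes, i.e. the size is rounded up to the
                 // next page boundary (and an already-aligned size still gains one extra page).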
4221         #ifdef PAGE_SIZE
4222             localAllocInfo.allocationSize += static_cast<VkDeviceSize>(PAGE_SIZE);
4223             localAllocInfo.allocationSize &= ~static_cast<VkDeviceSize>(PAGE_SIZE - 1);
4224         #elif defined(_WIN32)
4225             localAllocInfo.allocationSize += static_cast<VkDeviceSize>(4096);
4226             localAllocInfo.allocationSize &= ~static_cast<VkDeviceSize>(4095);
4227         #else
4228             localAllocInfo.allocationSize += static_cast<VkDeviceSize>(getpagesize());
4229             localAllocInfo.allocationSize &= ~static_cast<VkDeviceSize>(getpagesize() - 1);
4230         #endif
4231         }
4232         // Note for AHardwareBuffers, the Vulkan spec states:
4233         //
4234         //     Android hardware buffers have intrinsic width, height, format, and usage
4235         //     properties, so Vulkan images bound to memory imported from an Android
4236         //     hardware buffer must use dedicated allocations
4237         //
4238         // so any allocation requests with a VkImportAndroidHardwareBufferInfoANDROID
4239         // will necessarily have a VkMemoryDedicatedAllocateInfo. However, the host
4240         // may or may not actually use a dedicated allocation during Buffer/ColorBuffer
4241         // setup. The check below determines whether the underlying Buffer/ColorBuffer
4242         // backing memory was originally created with a dedicated allocation.
4243         bool shouldUseDedicatedAllocInfo = dedicatedAllocInfoPtr != nullptr;
4244 
4245         const VkImportColorBufferGOOGLE* importCbInfoPtr =
4246             vk_find_struct<VkImportColorBufferGOOGLE>(pAllocateInfo);
4247         const VkImportBufferGOOGLE* importBufferInfoPtr =
4248             vk_find_struct<VkImportBufferGOOGLE>(pAllocateInfo);
4249 
4250         const VkCreateBlobGOOGLE* createBlobInfoPtr =
4251             vk_find_struct<VkCreateBlobGOOGLE>(pAllocateInfo);
4252 
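             // Pre-declare the platform-specific external-memory import struct; it is appended
             // to the pNext chain below only when an external handle (ColorBuffer/Buffer export
             // or guest blob descriptor) is actually imported.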
4253 #ifdef _WIN32
4254         VkImportMemoryWin32HandleInfoKHR importInfo{
4255             VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
4256             0,
4257             VK_EXT_MEMORY_HANDLE_TYPE_BIT,
4258             VK_EXT_MEMORY_HANDLE_INVALID,
4259             L"",
4260         };
4261 #elif defined(__QNX__)
4262         VkImportScreenBufferInfoQNX importInfo{
4263             VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX,
4264             0,
4265             VK_EXT_MEMORY_HANDLE_INVALID,
4266         };
4267 #else
4268         VkImportMemoryFdInfoKHR importInfo{
4269             VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
4270             0,
4271             VK_EXT_MEMORY_HANDLE_TYPE_BIT,
4272             VK_EXT_MEMORY_HANDLE_INVALID,
4273         };
4274 #endif
4275 
4276         void* mappedPtr = nullptr;
4277         ManagedDescriptor externalMemoryHandle;
4278         if (importCbInfoPtr) {
4279             bool vulkanOnly = mGuestUsesAngle;
4280 
4281             bool colorBufferMemoryUsesDedicatedAlloc = false;
4282             if (!getColorBufferAllocationInfo(importCbInfoPtr->colorBuffer,
4283                                               &localAllocInfo.allocationSize,
4284                                               &localAllocInfo.memoryTypeIndex,
4285                                               &colorBufferMemoryUsesDedicatedAlloc, &mappedPtr)) {
4286                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
4287                     << "Failed to get allocation info for ColorBuffer:"
4288                     << importCbInfoPtr->colorBuffer;
4289             }
4290 
4291             shouldUseDedicatedAllocInfo &= colorBufferMemoryUsesDedicatedAlloc;
4292 
4293             if (!vulkanOnly) {
4294                 auto fb = FrameBuffer::getFB();
4295                 if (fb) {
4296                     fb->invalidateColorBufferForVk(importCbInfoPtr->colorBuffer);
4297                 }
4298             }
4299 
4300             if (m_emu->instanceSupportsExternalMemoryCapabilities) {
4301                 VK_EXT_MEMORY_HANDLE cbExtMemoryHandle =
4302                     getColorBufferExtMemoryHandle(importCbInfoPtr->colorBuffer);
4303 
4304                 if (cbExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
4305                     fprintf(stderr,
4306                             "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
4307                             "colorBuffer 0x%x does not have Vulkan external memory backing\n",
4308                             __func__, importCbInfoPtr->colorBuffer);
4309                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4310                 }
4311 
4312 #if defined(__QNX__)
4313                 importInfo.buffer = cbExtMemoryHandle;
4314 #else
4315                 externalMemoryHandle = ManagedDescriptor(dupExternalMemory(cbExtMemoryHandle));
4316 
4317 #ifdef _WIN32
4318                 importInfo.handle = externalMemoryHandle.get().value_or(static_cast<HANDLE>(NULL));
4319 #else
4320                 importInfo.fd = externalMemoryHandle.get().value_or(-1);
4321 #endif
4322 #endif
4323                 vk_append_struct(&structChainIter, &importInfo);
4324             }
4325         }
4326 
4327         if (importBufferInfoPtr) {
4328             bool bufferMemoryUsesDedicatedAlloc = false;
4329             if (!getBufferAllocationInfo(
4330                     importBufferInfoPtr->buffer, &localAllocInfo.allocationSize,
4331                     &localAllocInfo.memoryTypeIndex, &bufferMemoryUsesDedicatedAlloc)) {
4332                 ERR("Failed to get Buffer:%d allocation info.", importBufferInfoPtr->buffer);
4333                 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4334             }
4335 
4336             shouldUseDedicatedAllocInfo &= bufferMemoryUsesDedicatedAlloc;
4337 
4338             if (m_emu->instanceSupportsExternalMemoryCapabilities) {
4339                 VK_EXT_MEMORY_HANDLE bufferExtMemoryHandle =
4340                     getBufferExtMemoryHandle(importBufferInfoPtr->buffer);
4341 
4342                 if (bufferExtMemoryHandle == VK_EXT_MEMORY_HANDLE_INVALID) {
4343                     fprintf(stderr,
4344                             "%s: VK_ERROR_OUT_OF_DEVICE_MEMORY: "
4345                             "buffer 0x%x does not have Vulkan external memory "
4346                             "backing\n",
4347                             __func__, importBufferInfoPtr->buffer);
4348                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4349                 }
4350 
4351 #if defined(__QNX__)
4352                 importInfo.buffer = bufferExtMemoryHandle;
4353 #else
4354                 bufferExtMemoryHandle = dupExternalMemory(bufferExtMemoryHandle);
4355 
4356 #ifdef _WIN32
4357                 importInfo.handle = bufferExtMemoryHandle;
4358 #else
4359                 importInfo.fd = bufferExtMemoryHandle;
4360 #endif
4361 #endif
4362                 vk_append_struct(&structChainIter, &importInfo);
4363             }
4364         }
4365 
4366         VkMemoryPropertyFlags memoryPropertyFlags;
4367 
4368         // Map guest memory index to host memory index and lookup memory properties:
4369         {
4370             std::lock_guard<std::recursive_mutex> lock(mLock);
4371 
4372             auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
4373             if (!physicalDevice) {
4374                 // The guest app gave an invalid VkDevice, but we don't really want to crash
4375                 // here; tolerate misbehaving apps and just fail the call.
4376                 return VK_ERROR_DEVICE_LOST;
4377             }
4378             auto* physicalDeviceInfo = android::base::find(mPhysdevInfo, *physicalDevice);
4379             if (!physicalDeviceInfo) {
4380                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
4381                     << "No physical device info available for " << *physicalDevice;
4382             }
4383 
4384             const auto hostMemoryInfoOpt =
4385                 physicalDeviceInfo->memoryPropertiesHelper
4386                     ->getHostMemoryInfoFromGuestMemoryTypeIndex(localAllocInfo.memoryTypeIndex);
4387             if (!hostMemoryInfoOpt) {
4388                 return VK_ERROR_INCOMPATIBLE_DRIVER;
4389             }
4390             const auto& hostMemoryInfo = *hostMemoryInfoOpt;
4391 
4392             localAllocInfo.memoryTypeIndex = hostMemoryInfo.index;
4393             memoryPropertyFlags = hostMemoryInfo.memoryType.propertyFlags;
4394         }
4395 
4396         if (shouldUseDedicatedAllocInfo) {
4397             vk_append_struct(&structChainIter, &localDedicatedAllocInfo);
4398         }
4399 
4400         VkExportMemoryAllocateInfo exportAllocate = {
4401             .sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
4402             .pNext = NULL,
4403         };
4404 
4405 #ifdef __unix__
4406         exportAllocate.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT;
4407 #endif
4408 
4409 #ifdef __linux__
4410         if (hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
4411             exportAllocate.handleTypes |= VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
4412         }
4413 #endif
4414 
4415 #ifdef _WIN32
4416         exportAllocate.handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT;
4417 #endif
4418 
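             // With external blobs enabled, export host-visible allocations so that a
             // descriptor for the memory can later be handed to the guest through the
             // blob path (see vkGetBlobInternal below).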
4419         bool hostVisible = memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
4420         if (hostVisible && m_emu->features.ExternalBlob.enabled) {
4421             vk_append_struct(&structChainIter, &exportAllocate);
4422         }
4423 
4424         if (createBlobInfoPtr && createBlobInfoPtr->blobMem == STREAM_BLOB_MEM_GUEST &&
4425             (createBlobInfoPtr->blobFlags & STREAM_BLOB_FLAG_CREATE_GUEST_HANDLE)) {
4426             DescriptorType rawDescriptor;
4427             uint32_t ctx_id = mSnapshotState == SnapshotState::Loading
4428                                   ? kTemporaryContextIdForSnapshotLoading
4429                                   : tInfo->ctx_id;
4430             auto descriptorInfoOpt =
4431                 BlobManager::get()->removeDescriptorInfo(ctx_id, createBlobInfoPtr->blobId);
4432             if (descriptorInfoOpt) {
4433                 auto rawDescriptorOpt = (*descriptorInfoOpt).descriptor.release();
4434                 if (rawDescriptorOpt) {
4435                     rawDescriptor = *rawDescriptorOpt;
4436                 } else {
4437                     ERR("Failed vkAllocateMemory: missing raw descriptor.");
4438                     return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4439                 }
4440             } else {
4441                 ERR("Failed vkAllocateMemory: missing descriptor info.");
4442                 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4443             }
4444 #if defined(__linux__)
4445             importInfo.fd = rawDescriptor;
4446 #endif
4447 
4448 #ifdef __linux__
4449             if (hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
4450                 importInfo.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
4451             }
4452 #endif
4453             vk_append_struct(&structChainIter, &importInfo);
4454         }
4455 
4456         VkImportMemoryHostPointerInfoEXT importHostInfo;
4457         std::optional<SharedMemory> sharedMemory = std::nullopt;
4458         std::shared_ptr<PrivateMemory> privateMemory = {};
4459 
4460         // TODO(b/261222354): Make sure the feature exists when initializing sVkEmulation.
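             // SystemBlob path: back host-visible allocations with shared memory created here
             // and imported via VK_EXT_external_memory_host, so the same pages can later be
             // shared with the guest as a system blob.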
4461         if (hostVisible && m_emu->features.SystemBlob.enabled) {
4462             // Ensure size is page-aligned.
4463             VkDeviceSize alignedSize = __ALIGN(localAllocInfo.allocationSize, kPageSizeforBlob);
4464             if (alignedSize != localAllocInfo.allocationSize) {
4465                 ERR("Warning: Aligning allocation size from %llu to %llu",
4466                     static_cast<unsigned long long>(localAllocInfo.allocationSize),
4467                     static_cast<unsigned long long>(alignedSize));
4468             }
4469             localAllocInfo.allocationSize = alignedSize;
4470 
4471             static std::atomic<uint64_t> uniqueShmemId = 0;
4472             sharedMemory = SharedMemory("shared-memory-vk-" + std::to_string(uniqueShmemId++),
4473                                         localAllocInfo.allocationSize);
4474             int ret = sharedMemory->create(0600);
4475             if (ret) {
4476                 ERR("Failed to create system-blob host-visible memory, error: %d", ret);
4477                 return VK_ERROR_OUT_OF_HOST_MEMORY;
4478             }
4479             mappedPtr = sharedMemory->get();
4480             int mappedPtrAlignment = reinterpret_cast<uintptr_t>(mappedPtr) % kPageSizeforBlob;
4481             if (mappedPtrAlignment != 0) {
4482                 ERR("Warning: Mapped shared memory pointer is not aligned to page size, alignment "
4483                     "is: %d",
4484                     mappedPtrAlignment);
4485             }
4486             importHostInfo = {.sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
4487                               .pNext = NULL,
4488                               .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
4489                               .pHostPointer = mappedPtr};
4490             vk_append_struct(&structChainIter, &importHostInfo);
4491         }
4492 
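             // VulkanAllocateHostMemory path: when no other pNext entries are present, back the
             // allocation with gfxstream-owned host memory (PrivateMemory) imported via
             // VK_EXT_external_memory_host.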
4493         VkImportMemoryHostPointerInfoEXT importHostInfoPrivate{};
4494         if (hostVisible && m_emu->features.VulkanAllocateHostMemory.enabled &&
4495             localAllocInfo.pNext == nullptr) {
4496             VkDeviceSize alignedSize = __ALIGN(localAllocInfo.allocationSize, kPageSizeforBlob);
4497             localAllocInfo.allocationSize = alignedSize;
4498             privateMemory =
4499                 std::make_shared<PrivateMemory>(kPageSizeforBlob, localAllocInfo.allocationSize);
4500             mappedPtr = privateMemory->getAddr();
4501             importHostInfoPrivate = {
4502                 .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
4503                 .pNext = NULL,
4504                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
4505                 .pHostPointer = mappedPtr};
4506             vk_append_struct(&structChainIter, &importHostInfoPrivate);
4507         }
4508 
4509         VkResult result = vk->vkAllocateMemory(device, &localAllocInfo, pAllocator, pMemory);
4510 
4511         if (result != VK_SUCCESS) {
4512             return result;
4513         }
4514 
4515 #ifdef _WIN32
4516         // Let ManagedDescriptor close the underlying HANDLE when it goes out of scope. From the
4517         // VkImportMemoryWin32HandleInfoKHR spec: Importing memory object payloads from Windows
4518         // handles does not transfer ownership of the handle to the Vulkan implementation. For
4519         // handle types defined as NT handles, the application must release handle ownership using
4520         // the CloseHandle system call when the handle is no longer needed. For handle types defined
4521         // as NT handles, the imported memory object holds a reference to its payload.
4522 #else
4523         // Tell ManagedDescriptor not to close the underlying fd, because the ownership has already
4524         // been transferred to the Vulkan implementation. From VkImportMemoryFdInfoKHR spec:
4525         // Importing memory from a file descriptor transfers ownership of the file descriptor from
4526         // the application to the Vulkan implementation. The application must not perform any
4527         // operations on the file descriptor after a successful import. The imported memory object
4528         // holds a reference to its payload.
4529         externalMemoryHandle.release();
4530 #endif
4531 
4532         std::lock_guard<std::recursive_mutex> lock(mLock);
4533 
4534         mMemoryInfo[*pMemory] = MemoryInfo();
4535         auto& memoryInfo = mMemoryInfo[*pMemory];
4536         memoryInfo.size = localAllocInfo.allocationSize;
4537         memoryInfo.device = device;
4538         memoryInfo.memoryIndex = localAllocInfo.memoryTypeIndex;
4539 #if defined(__APPLE__) && defined(VK_MVK_moltenvk)
4540         if (importCbInfoPtr && m_emu->instanceSupportsMoltenVK) {
4541             memoryInfo.mtlTexture = getColorBufferMTLTexture(importCbInfoPtr->colorBuffer);
4542         }
4543 #endif
4544 
4545         if (importCbInfoPtr && !mGuestUsesAngle) {
4546             memoryInfo.boundColorBuffer = importCbInfoPtr->colorBuffer;
4547         }
4548 
4549         if (!hostVisible) {
4550             *pMemory = new_boxed_non_dispatchable_VkDeviceMemory(*pMemory);
4551             return result;
4552         }
4553 
4554         if (memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) {
4555             memoryInfo.caching = MAP_CACHE_CACHED;
4556         } else if (memoryPropertyFlags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD) {
4557             memoryInfo.caching = MAP_CACHE_UNCACHED;
4558         } else if (memoryPropertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) {
4559             memoryInfo.caching = MAP_CACHE_WC;
4560         }
4561 
4562         VkInstance* instance = deviceToInstanceLocked(device);
4563         InstanceInfo* instanceInfo = android::base::find(mInstanceInfo, *instance);
4564         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4565         if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
4566 
4567         // If gfxstream needs to be able to read from this memory, needToMap should be true.
4568         // When external blobs are off, we always want to map HOST_VISIBLE memory because we
4569         // run in the same process as the guest.
4570         // When external blobs are on, we want to map memory only if a workaround is using it in
4571         // the gfxstream process. This happens when ASTC CPU emulation is on.
4572         bool needToMap =
4573             (!m_emu->features.ExternalBlob.enabled ||
4574              (deviceInfo->useAstcCpuDecompression && deviceInfo->emulateTextureAstc)) &&
4575             !createBlobInfoPtr;
4576 
4577         // Some cases provide a mappedPtr, so we only map if we still don't have a pointer here.
4578         if (!mappedPtr && needToMap) {
4579             memoryInfo.needUnmap = true;
4580             VkResult mapResult =
4581                 vk->vkMapMemory(device, *pMemory, 0, memoryInfo.size, 0, &memoryInfo.ptr);
4582             if (mapResult != VK_SUCCESS) {
4583                 freeMemoryLocked(vk, device, *pMemory, pAllocator);
4584                 *pMemory = VK_NULL_HANDLE;
4585                 return VK_ERROR_OUT_OF_HOST_MEMORY;
4586             }
4587         } else {
4588             // Since we didn't call vkMapMemory, unmapping is not needed (don't own mappedPtr).
4589             memoryInfo.needUnmap = false;
4590             memoryInfo.ptr = mappedPtr;
4591 
4592             if (createBlobInfoPtr) {
4593                 memoryInfo.blobId = createBlobInfoPtr->blobId;
4594             }
4595 
4596             // Always assign the shared memory into memoryInfo. If it was used, then it will have
4597             // ownership transferred.
4598             memoryInfo.sharedMemory = std::exchange(sharedMemory, std::nullopt);
4599 
4600             memoryInfo.privateMemory = privateMemory;
4601         }
4602 
4603         *pMemory = new_boxed_non_dispatchable_VkDeviceMemory(*pMemory);
4604 
4605         return result;
4606     }
4607 
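         // Releases all host-side state for |memory|: unmaps it if we mapped it ourselves,
         // releases any Metal texture reference, and frees the underlying VkDeviceMemory.
         // Must be called with mLock held.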
4608     void freeMemoryLocked(VulkanDispatch* vk, VkDevice device, VkDeviceMemory memory,
4609                           const VkAllocationCallbacks* pAllocator) {
4610         auto* info = android::base::find(mMemoryInfo, memory);
4611         if (!info) return;  // Invalid usage.
4612 
4613 #ifdef __APPLE__
4614         if (info->mtlTexture) {
4615             CFRelease(info->mtlTexture);
4616             info->mtlTexture = nullptr;
4617         }
4618 #endif
4619 
4620         if (info->directMapped) {
4621             // If direct mapped, we leave it up to the guest address space driver
4622             // to control unmapping the KVM slot on the host side,
4623             // in order to avoid situations where
4624             //
4625             // 1. we try to unmap here and deadlock, or
4626             //
4627             // 2. we unmap at the wrong time (a parallel call to unmap racing
4628             // with address space allocate and mapMemory could lead to
4629             // mapping the same GPA twice).
4630             if (mUseOldMemoryCleanupPath) {
4631                 unmapMemoryAtGpaIfExists(info->guestPhysAddr);
4632             }
4633         }
4634 
4635         if (info->virtioGpuMapped) {
4636             if (mLogging) {
4637                 fprintf(stderr, "%s: unmap hostmem %p id 0x%llx\n", __func__, info->ptr,
4638                         (unsigned long long)info->hostmemId);
4639             }
4640         }
4641 
4642         if (info->needUnmap && info->ptr) {
4643             vk->vkUnmapMemory(device, memory);
4644         }
4645 
4646         vk->vkFreeMemory(device, memory, pAllocator);
4647 
4648         mMemoryInfo.erase(memory);
4649     }
4650 
4651     void on_vkFreeMemory(android::base::BumpPool* pool, VkDevice boxed_device,
4652                          VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator) {
4653         auto device = unbox_VkDevice(boxed_device);
4654         auto vk = dispatch_VkDevice(boxed_device);
4655 
4656         if (!device || !vk) {
4657             return;
4658         }
4659 
4660         std::lock_guard<std::recursive_mutex> lock(mLock);
4661 
4662         freeMemoryLocked(vk, device, memory, pAllocator);
4663     }
4664 
4665     VkResult on_vkMapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory memory,
4666                             VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags,
4667                             void** ppData) {
4668         std::lock_guard<std::recursive_mutex> lock(mLock);
4669         return on_vkMapMemoryLocked(0, memory, offset, size, flags, ppData);
4670     }
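         // Host memory is already mapped (or imported) at allocation time, so "mapping" here
         // just returns a pointer at the requested offset into the existing host mapping.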
4671     VkResult on_vkMapMemoryLocked(VkDevice, VkDeviceMemory memory, VkDeviceSize offset,
4672                                   VkDeviceSize size, VkMemoryMapFlags flags, void** ppData) {
4673         auto* info = android::base::find(mMemoryInfo, memory);
4674         if (!info || !info->ptr) return VK_ERROR_MEMORY_MAP_FAILED;  // Invalid usage.
4675 
4676         *ppData = (void*)((uint8_t*)info->ptr + offset);
4677         return VK_SUCCESS;
4678     }
4679 
4680     void on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice, VkDeviceMemory) {
4681         // no-op; user-level mapping does not correspond
4682         // to any operation here.
4683     }
4684 
4685     uint8_t* getMappedHostPointer(VkDeviceMemory memory) {
4686         std::lock_guard<std::recursive_mutex> lock(mLock);
4687 
4688         auto* info = android::base::find(mMemoryInfo, memory);
4689         if (!info) return nullptr;
4690 
4691         return (uint8_t*)(info->ptr);
4692     }
4693 
4694     VkDeviceSize getDeviceMemorySize(VkDeviceMemory memory) {
4695         std::lock_guard<std::recursive_mutex> lock(mLock);
4696 
4697         auto* info = android::base::find(mMemoryInfo, memory);
4698         if (!info) return 0;
4699 
4700         return info->size;
4701     }
4702 
4703     bool usingDirectMapping() const {
4704         return m_emu->features.GlDirectMem.enabled ||
4705                m_emu->features.VirtioGpuNext.enabled;
4706     }
4707 
4708     HostFeatureSupport getHostFeatureSupport() const {
4709         HostFeatureSupport res;
4710 
4711         if (!m_vk) return res;
4712 
4713         auto emu = getGlobalVkEmulation();
4714 
4715         res.supportsVulkan = emu && emu->live;
4716 
4717         if (!res.supportsVulkan) return res;
4718 
4719         const auto& props = emu->deviceInfo.physdevProps;
4720 
4721         res.supportsVulkan1_1 = props.apiVersion >= VK_API_VERSION_1_1;
4722         res.useDeferredCommands = emu->useDeferredCommands;
4723         res.useCreateResourcesWithRequirements = emu->useCreateResourcesWithRequirements;
4724 
4725         res.apiVersion = props.apiVersion;
4726         res.driverVersion = props.driverVersion;
4727         res.deviceID = props.deviceID;
4728         res.vendorID = props.vendorID;
4729         return res;
4730     }
4731 
4732     bool hasInstanceExtension(VkInstance instance, const std::string& name) {
4733         auto* info = android::base::find(mInstanceInfo, instance);
4734         if (!info) return false;
4735 
4736         for (const auto& enabledName : info->enabledExtensionNames) {
4737             if (name == enabledName) return true;
4738         }
4739 
4740         return false;
4741     }
4742 
4743     bool hasDeviceExtension(VkDevice device, const std::string& name) {
4744         auto* info = android::base::find(mDeviceInfo, device);
4745         if (!info) return false;
4746 
4747         for (const auto& enabledName : info->enabledExtensionNames) {
4748             if (name == enabledName) return true;
4749         }
4750 
4751         return false;
4752     }
4753 
4754     // Returns whether a vector of VkExtensionProperties contains a particular extension
4755     bool hasDeviceExtension(const std::vector<VkExtensionProperties>& properties,
4756                             const char* name) {
4757         for (const auto& prop : properties) {
4758             if (strcmp(prop.extensionName, name) == 0) return true;
4759         }
4760         return false;
4761     }
4762 
4763     // Convenience function to call vkEnumerateDeviceExtensionProperties and get the results as an
4764     // std::vector
4765     VkResult enumerateDeviceExtensionProperties(VulkanDispatch* vk, VkPhysicalDevice physicalDevice,
4766                                                 const char* pLayerName,
4767                                                 std::vector<VkExtensionProperties>& properties) {
4768         uint32_t propertyCount = 0;
4769         VkResult result = vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName,
4770                                                                    &propertyCount, nullptr);
4771         if (result != VK_SUCCESS) return result;
4772 
4773         properties.resize(propertyCount);
4774         return vk->vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName, &propertyCount,
4775                                                         properties.data());
4776     }
4777 
4778     // VK_ANDROID_native_buffer
4779     VkResult on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool, VkDevice,
4780                                                   VkFormat format, VkImageUsageFlags imageUsage,
4781                                                   int* grallocUsage) {
4782         getGralloc0Usage(format, imageUsage, grallocUsage);
4783         return VK_SUCCESS;
4784     }
4785 
4786     VkResult on_vkGetSwapchainGrallocUsage2ANDROID(
4787         android::base::BumpPool* pool, VkDevice, VkFormat format, VkImageUsageFlags imageUsage,
4788         VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
4789         uint64_t* grallocProducerUsage) {
4790         getGralloc1Usage(format, imageUsage, swapchainImageUsage, grallocConsumerUsage,
4791                          grallocProducerUsage);
4792         return VK_SUCCESS;
4793     }
4794 
4795     VkResult on_vkAcquireImageANDROID(android::base::BumpPool* pool, VkDevice boxed_device,
4796                                       VkImage image, int nativeFenceFd, VkSemaphore semaphore,
4797                                       VkFence fence) {
4798         auto device = unbox_VkDevice(boxed_device);
4799         auto vk = dispatch_VkDevice(boxed_device);
4800 
4801         std::lock_guard<std::recursive_mutex> lock(mLock);
4802 
4803         auto* deviceInfo = android::base::find(mDeviceInfo, device);
4804         if (!deviceInfo) return VK_ERROR_INITIALIZATION_FAILED;
4805 
4806         auto* imageInfo = android::base::find(mImageInfo, image);
4807         if (!imageInfo) return VK_ERROR_INITIALIZATION_FAILED;
4808 
4809         VkQueue defaultQueue;
4810         uint32_t defaultQueueFamilyIndex;
4811         Lock* defaultQueueLock;
4812         if (!getDefaultQueueForDeviceLocked(device, &defaultQueue, &defaultQueueFamilyIndex,
4813                                             &defaultQueueLock)) {
4814             fprintf(stderr, "%s: can't get the default queue\n", __func__);
4815             return VK_ERROR_INITIALIZATION_FAILED;
4816         }
4817 
4818         DeviceOpBuilder builder(*deviceInfo->deviceOpTracker);
4819 
4820         VkFence usedFence = fence;
4821         if (usedFence == VK_NULL_HANDLE) {
4822             usedFence = builder.CreateFenceForOp();
4823         }
4824 
4825         AndroidNativeBufferInfo* anbInfo = imageInfo->anbInfo.get();
4826 
4827         VkResult result = setAndroidNativeImageSemaphoreSignaled(
4828             vk, device, defaultQueue, defaultQueueFamilyIndex, defaultQueueLock, semaphore,
4829             usedFence, anbInfo);
4830         if (result != VK_SUCCESS) {
4831             return result;
4832         }
4833 
4834         DeviceOpWaitable aniCompletedWaitable = builder.OnQueueSubmittedWithFence(usedFence);
4835 
4836         if (semaphore != VK_NULL_HANDLE) {
4837             auto semaphoreInfo = android::base::find(mSemaphoreInfo, semaphore);
4838             if (semaphoreInfo != nullptr) {
4839                 semaphoreInfo->latestUse = aniCompletedWaitable;
4840             }
4841         }
4842         if (fence != VK_NULL_HANDLE) {
4843             auto fenceInfo = android::base::find(mFenceInfo, fence);
4844             if (fenceInfo != nullptr) {
4845                 fenceInfo->latestUse = aniCompletedWaitable;
4846             }
4847         }
4848 
4849         deviceInfo->deviceOpTracker->PollAndProcessGarbage();
4850 
4851         return VK_SUCCESS;
4852     }
4853 
4854     VkResult on_vkQueueSignalReleaseImageANDROID(android::base::BumpPool* pool, VkQueue boxed_queue,
4855                                                  uint32_t waitSemaphoreCount,
4856                                                  const VkSemaphore* pWaitSemaphores, VkImage image,
4857                                                  int* pNativeFenceFd) {
4858         auto queue = unbox_VkQueue(boxed_queue);
4859         auto vk = dispatch_VkQueue(boxed_queue);
4860 
4861         std::lock_guard<std::recursive_mutex> lock(mLock);
4862 
4863         auto* queueInfo = android::base::find(mQueueInfo, queue);
4864         if (!queueInfo) return VK_ERROR_INITIALIZATION_FAILED;
4865 
4866         if (mRenderDocWithMultipleVkInstances) {
4867             VkPhysicalDevice vkPhysicalDevice = mDeviceToPhysicalDevice.at(queueInfo->device);
4868             VkInstance vkInstance = mPhysicalDeviceToInstance.at(vkPhysicalDevice);
4869             mRenderDocWithMultipleVkInstances->onFrameDelimiter(vkInstance);
4870         }
4871 
4872         auto* imageInfo = android::base::find(mImageInfo, image);
         if (!imageInfo) return VK_ERROR_INITIALIZATION_FAILED;
4873         auto anbInfo = imageInfo->anbInfo;
4874 
4875         if (anbInfo->useVulkanNativeImage) {
4876             // vkQueueSignalReleaseImageANDROID() is only called by the Android framework's
4877             // implementation of vkQueuePresentKHR(). The guest application is responsible for
4878             // transitioning the image layout of the image passed to vkQueuePresentKHR() to
4879             // VK_IMAGE_LAYOUT_PRESENT_SRC_KHR before the call. If the host is using native
4880             // Vulkan images where `image` is backed with the same memory as its ColorBuffer,
4881             // then we need to update the tracked layout for that ColorBuffer.
4882             setColorBufferCurrentLayout(anbInfo->colorBufferHandle,
4883                                         VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
4884         }
4885 
4886         return syncImageToColorBuffer(vk, queueInfo->queueFamilyIndex, queue, queueInfo->lock,
4887                                       waitSemaphoreCount, pWaitSemaphores, pNativeFenceFd, anbInfo);
4888     }
4889 
4890     VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
4891                                                   VkDevice boxed_device, VkDeviceMemory memory,
4892                                                   uint64_t* pAddress) {
4893         auto device = unbox_VkDevice(boxed_device);
4894         auto vk = dispatch_VkDevice(boxed_device);
4895 
4896         if (!m_emu->features.GlDirectMem.enabled) {
4897             fprintf(stderr,
4898                     "FATAL: Tried to use direct mapping "
4899                     "while GlDirectMem is not enabled!\n");
4900         }
4901 
4902         std::lock_guard<std::recursive_mutex> lock(mLock);
4903 
4904         if (mLogging) {
4905             fprintf(stderr, "%s: deviceMemory: 0x%llx pAddress: 0x%llx\n", __func__,
4906                     (unsigned long long)memory, (unsigned long long)(*pAddress));
4907         }
4908 
4909         if (!mapHostVisibleMemoryToGuestPhysicalAddressLocked(vk, device, memory, *pAddress)) {
4910             return VK_ERROR_OUT_OF_HOST_MEMORY;
4911         }
4912 
4913         auto* info = android::base::find(mMemoryInfo, memory);
4914         if (!info) return VK_ERROR_INITIALIZATION_FAILED;
4915 
4916         *pAddress = (uint64_t)(uintptr_t)info->ptr;
4917 
4918         return VK_SUCCESS;
4919     }
4920 
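         // Shares the backing of |memory| with the guest: hands a shared-memory or
         // external-memory descriptor to the BlobManager when blob features are enabled,
         // otherwise maps the memory and registers the host mapping for virtio-gpu.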
4921     VkResult vkGetBlobInternal(VkDevice boxed_device, VkDeviceMemory memory, uint64_t hostBlobId) {
4922         std::lock_guard<std::recursive_mutex> lock(mLock);
4923         auto* tInfo = RenderThreadInfoVk::get();
4924         uint32_t ctx_id = mSnapshotState == SnapshotState::Loading
4925                               ? kTemporaryContextIdForSnapshotLoading
4926                               : tInfo->ctx_id;
4927 
4928         auto* info = android::base::find(mMemoryInfo, memory);
4929         if (!info) return VK_ERROR_OUT_OF_HOST_MEMORY;
4930 
4931         hostBlobId = (info->blobId && !hostBlobId) ? info->blobId : hostBlobId;
4932 
4933         if (m_emu->features.SystemBlob.enabled && info->sharedMemory.has_value()) {
4934             uint32_t handleType = STREAM_MEM_HANDLE_TYPE_SHM;
4935             // We transfer ownership of the shared memory handle to the descriptor info.
4936             // The memory itself is destroyed only when all processes unmap / release their
4937             // handles.
4938             BlobManager::get()->addDescriptorInfo(ctx_id, hostBlobId,
4939                                                   info->sharedMemory->releaseHandle(), handleType,
4940                                                   info->caching, std::nullopt);
4941         } else if (m_emu->features.ExternalBlob.enabled) {
4942             VkResult result;
4943             auto device = unbox_VkDevice(boxed_device);
4944             DescriptorType handle;
4945             uint32_t handleType;
4946             struct VulkanInfo vulkanInfo = {
4947                 .memoryIndex = info->memoryIndex,
4948             };
4949             memcpy(vulkanInfo.deviceUUID, m_emu->deviceInfo.idProps.deviceUUID,
4950                    sizeof(vulkanInfo.deviceUUID));
4951             memcpy(vulkanInfo.driverUUID, m_emu->deviceInfo.idProps.driverUUID,
4952                    sizeof(vulkanInfo.driverUUID));
4953 
4954 #ifdef __unix__
4955             VkMemoryGetFdInfoKHR getFd = {
4956                 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
4957                 .pNext = nullptr,
4958                 .memory = memory,
4959                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
4960             };
4961 
4962             handleType = STREAM_MEM_HANDLE_TYPE_OPAQUE_FD;
4963 #endif
4964 
4965 #ifdef __linux__
4966             if (hasDeviceExtension(device, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
4967                 getFd.handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
4968                 handleType = STREAM_MEM_HANDLE_TYPE_DMABUF;
4969             }
4970 #endif
4971 
4972 #ifdef __unix__
4973             result = m_emu->deviceInfo.getMemoryHandleFunc(device, &getFd, &handle);
4974             if (result != VK_SUCCESS) {
4975                 return result;
4976             }
4977 #endif
4978 
4979 #ifdef _WIN32
4980             VkMemoryGetWin32HandleInfoKHR getHandle = {
4981                 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
4982                 .pNext = nullptr,
4983                 .memory = memory,
4984                 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
4985             };
4986 
4987             handleType = STREAM_MEM_HANDLE_TYPE_OPAQUE_WIN32;
4988 
4989             result = m_emu->deviceInfo.getMemoryHandleFunc(device, &getHandle, &handle);
4990             if (result != VK_SUCCESS) {
4991                 return result;
4992             }
4993 #endif
4994 
4995             ManagedDescriptor managedHandle(handle);
4996             BlobManager::get()->addDescriptorInfo(ctx_id, hostBlobId, std::move(managedHandle),
4997                                                   handleType, info->caching,
4998                                                   std::optional<VulkanInfo>(vulkanInfo));
4999         } else if (!info->needUnmap) {
5000             auto device = unbox_VkDevice(boxed_device);
5001             auto vk = dispatch_VkDevice(boxed_device);
5002             VkResult mapResult = vk->vkMapMemory(device, memory, 0, info->size, 0, &info->ptr);
5003             if (mapResult != VK_SUCCESS) {
5004                 return VK_ERROR_OUT_OF_HOST_MEMORY;
5005             }
5006 
5007             info->needUnmap = true;
5008         }
5009 
5010         if (info->needUnmap) {
5011             uint64_t hva = (uint64_t)(uintptr_t)(info->ptr);
5012             uint64_t size = (uint64_t)(uintptr_t)(info->size);
5013 
5014             uint64_t alignedHva = hva & kPageMaskForBlob;
5015             uint64_t alignedSize =
5016                 kPageSizeforBlob * ((size + kPageSizeforBlob - 1) / kPageSizeforBlob);
5017 
5018             if (hva != alignedHva) {
5019                 ERR("Mapping non page-size (0x%" PRIx64
5020                     ") aligned host virtual address 0x%" PRIx64
5021                     " using the aligned host virtual address 0x%" PRIx64 ". The underlying "
5022                     "resources using this blob may be corrupted/offset.",
5023                     kPageSizeforBlob, hva, alignedHva);
5024             }
5025             BlobManager::get()->addMapping(ctx_id, hostBlobId, (void*)(uintptr_t)alignedHva,
5026                                            info->caching);
5027             info->virtioGpuMapped = true;
5028             info->hostmemId = hostBlobId;
5029         }
5030 
5031         return VK_SUCCESS;
5032     }
5033 
5034     VkResult on_vkGetBlobGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
5035                                 VkDeviceMemory memory) {
5036         return vkGetBlobInternal(boxed_device, memory, 0);
5037     }
5038 
5039     VkResult on_vkGetMemoryHostAddressInfoGOOGLE(android::base::BumpPool* pool,
5040                                                  VkDevice boxed_device, VkDeviceMemory memory,
5041                                                  uint64_t* pAddress, uint64_t* pSize,
5042                                                  uint64_t* pHostmemId) {
5043         hostBlobId++;
5044         *pHostmemId = hostBlobId;
5045         return vkGetBlobInternal(boxed_device, memory, hostBlobId);
5046     }
5047 
5048     VkResult on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
5049                                        VkDeviceMemory memory,
5050                                        const VkAllocationCallbacks* pAllocator) {
5051         on_vkFreeMemory(pool, boxed_device, memory, pAllocator);
5052 
5053         return VK_SUCCESS;
5054     }
5055 
5056     VkResult on_vkAllocateCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
5057                                          const VkCommandBufferAllocateInfo* pAllocateInfo,
5058                                          VkCommandBuffer* pCommandBuffers) {
5059         auto device = unbox_VkDevice(boxed_device);
5060         auto vk = dispatch_VkDevice(boxed_device);
5061 
5062         VkResult result = vk->vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
5063 
5064         if (result != VK_SUCCESS) {
5065             return result;
5066         }
5067 
5068         std::lock_guard<std::recursive_mutex> lock(mLock);
5069 
5070         auto* deviceInfo = android::base::find(mDeviceInfo, device);
5071         if (!deviceInfo) return VK_ERROR_UNKNOWN;
5072 
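             // Track each newly allocated command buffer and hand a boxed handle back to the
             // guest.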
5073         for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
5074             mCmdBufferInfo[pCommandBuffers[i]] = CommandBufferInfo();
5075             mCmdBufferInfo[pCommandBuffers[i]].device = device;
5076             mCmdBufferInfo[pCommandBuffers[i]].debugUtilsHelper = deviceInfo->debugUtilsHelper;
5077             mCmdBufferInfo[pCommandBuffers[i]].cmdPool = pAllocateInfo->commandPool;
5078             auto boxed = new_boxed_VkCommandBuffer(pCommandBuffers[i], vk,
5079                                                    false /* does not own dispatch */);
5080             mCmdBufferInfo[pCommandBuffers[i]].boxed = boxed;
5081             pCommandBuffers[i] = (VkCommandBuffer)boxed;
5082         }
5083         return result;
5084     }
5085 
5086     VkResult on_vkCreateCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
5087                                     const VkCommandPoolCreateInfo* pCreateInfo,
5088                                     const VkAllocationCallbacks* pAllocator,
5089                                     VkCommandPool* pCommandPool) {
5090         auto device = unbox_VkDevice(boxed_device);
5091         auto vk = dispatch_VkDevice(boxed_device);
5092 
5093         VkResult result = vk->vkCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
5094         if (result != VK_SUCCESS) {
5095             return result;
5096         }
5097         std::lock_guard<std::recursive_mutex> lock(mLock);
5098         mCmdPoolInfo[*pCommandPool] = CommandPoolInfo();
5099         auto& cmdPoolInfo = mCmdPoolInfo[*pCommandPool];
5100         cmdPoolInfo.device = device;
5101 
5102         *pCommandPool = new_boxed_non_dispatchable_VkCommandPool(*pCommandPool);
5103         cmdPoolInfo.boxed = *pCommandPool;
5104 
5105         return result;
5106     }
5107 
5108     void on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
5109                                  VkCommandPool commandPool,
5110                                  const VkAllocationCallbacks* pAllocator) {
5111         auto device = unbox_VkDevice(boxed_device);
5112         auto vk = dispatch_VkDevice(boxed_device);
5113 
5114         vk->vkDestroyCommandPool(device, commandPool, pAllocator);
5115         std::lock_guard<std::recursive_mutex> lock(mLock);
5116         const auto* cmdPoolInfo = android::base::find(mCmdPoolInfo, commandPool);
5117         if (cmdPoolInfo) {
5118             removeCommandBufferInfo(cmdPoolInfo->cmdBuffers);
5119             mCmdPoolInfo.erase(commandPool);
5120         }
5121     }
5122 
5123     VkResult on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice boxed_device,
5124                                    VkCommandPool commandPool, VkCommandPoolResetFlags flags) {
5125         auto device = unbox_VkDevice(boxed_device);
5126         auto vk = dispatch_VkDevice(boxed_device);
5127 
5128         VkResult result = vk->vkResetCommandPool(device, commandPool, flags);
5129         if (result != VK_SUCCESS) {
5130             return result;
5131         }
5132         return result;
5133     }
5134 
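         // Records executed secondary command buffers so queue submission can preprocess them
         // recursively.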
5135     void on_vkCmdExecuteCommands(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
5136                                  uint32_t commandBufferCount,
5137                                  const VkCommandBuffer* pCommandBuffers) {
5138         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5139         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5140 
5141         vk->vkCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
5142         std::lock_guard<std::recursive_mutex> lock(mLock);
5143         CommandBufferInfo& cmdBuffer = mCmdBufferInfo[commandBuffer];
5144         cmdBuffer.subCmds.insert(cmdBuffer.subCmds.end(), pCommandBuffers,
5145                                  pCommandBuffers + commandBufferCount);
5146     }
5147 
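         // Overload helpers so the templated on_vkQueueSubmit below works with both
         // VkSubmitInfo (vkQueueSubmit) and VkSubmitInfo2 (vkQueueSubmit2).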
5148     VkResult dispatchVkQueueSubmit(VulkanDispatch* vk, VkQueue unboxed_queue, uint32_t submitCount,
5149                                    const VkSubmitInfo* pSubmits, VkFence fence) {
5150         return vk->vkQueueSubmit(unboxed_queue, submitCount, pSubmits, fence);
5151     }
5152 
5153     VkResult dispatchVkQueueSubmit(VulkanDispatch* vk, VkQueue unboxed_queue, uint32_t submitCount,
5154                                    const VkSubmitInfo2* pSubmits, VkFence fence) {
5155         return vk->vkQueueSubmit2(unboxed_queue, submitCount, pSubmits, fence);
5156     }
5157 
5158     int getCommandBufferCount(const VkSubmitInfo& submitInfo) {
5159         return submitInfo.commandBufferCount;
5160     }
5161 
5162     VkCommandBuffer getCommandBuffer(const VkSubmitInfo& submitInfo, int idx) {
5163         return submitInfo.pCommandBuffers[idx];
5164     }
5165 
5166     int getCommandBufferCount(const VkSubmitInfo2& submitInfo) {
5167         return submitInfo.commandBufferInfoCount;
5168     }
5169 
5170     VkCommandBuffer getCommandBuffer(const VkSubmitInfo2& submitInfo, int idx) {
5171         return submitInfo.pCommandBufferInfos[idx].commandBuffer;
5172     }
5173 
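         // Shared implementation for vkQueueSubmit and vkQueueSubmit2: gathers the ColorBuffer
         // acquire/release sets and layout transitions recorded in the submitted command
         // buffers, invalidates acquired ColorBuffers before the submit, and flushes released
         // ones after the submit completes.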
5174     template <typename VkSubmitInfoType>
5175     VkResult on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue boxed_queue,
5176                               uint32_t submitCount, const VkSubmitInfoType* pSubmits,
5177                               VkFence fence) {
5178         auto queue = unbox_VkQueue(boxed_queue);
5179         auto vk = dispatch_VkQueue(boxed_queue);
5180 
5181         std::unordered_set<HandleType> acquiredColorBuffers;
5182         std::unordered_set<HandleType> releasedColorBuffers;
5183         bool vulkanOnly = mGuestUsesAngle;
5184         if (!vulkanOnly) {
5185             {
5186                 std::lock_guard<std::recursive_mutex> lock(mLock);
5187                 for (int i = 0; i < submitCount; i++) {
5188                     for (int j = 0; j < getCommandBufferCount(pSubmits[i]); j++) {
5189                         VkCommandBuffer cmdBuffer = getCommandBuffer(pSubmits[i], j);
5190                         CommandBufferInfo* cmdBufferInfo =
5191                             android::base::find(mCmdBufferInfo, cmdBuffer);
5192                         if (!cmdBufferInfo) {
5193                             continue;
5194                         }
5195                         acquiredColorBuffers.merge(cmdBufferInfo->acquiredColorBuffers);
5196                         releasedColorBuffers.merge(cmdBufferInfo->releasedColorBuffers);
5197                         for (const auto& ite : cmdBufferInfo->cbLayouts) {
5198                             setColorBufferCurrentLayout(ite.first, ite.second);
5199                         }
5200                     }
5201                 }
5202             }
5203             auto fb = FrameBuffer::getFB();
5204             if (fb) {
5205                 for (HandleType cb : acquiredColorBuffers) {
5206                     fb->invalidateColorBufferForVk(cb);
5207                 }
5208             }
5209         }
5210 
5211         VkDevice device = VK_NULL_HANDLE;
5212         Lock* ql;
5213         {
5214             std::lock_guard<std::recursive_mutex> lock(mLock);
5215 
5216             {
5217                 auto* queueInfo = android::base::find(mQueueInfo, queue);
5218                 if (queueInfo) {
5219                     device = queueInfo->device;
5220                     // Unsafe to release when snapshots are enabled.
5221                     // Snapshot load might fail to find the shader modules if we release them here.
5222                     if (!snapshotsEnabled()) {
5223                         sBoxedHandleManager.processDelayedRemovesGlobalStateLocked(device);
5224                     }
5225                 }
5226             }
5227 
5228             for (uint32_t i = 0; i < submitCount; i++) {
5229                 executePreprocessRecursive(pSubmits[i]);
5230             }
5231 
5232             auto* queueInfo = android::base::find(mQueueInfo, queue);
5233             if (!queueInfo) return VK_SUCCESS;
5234             ql = queueInfo->lock;
5235         }
5236 
5237         VkFence localFence = VK_NULL_HANDLE;
5238         if (!releasedColorBuffers.empty() && fence == VK_NULL_HANDLE) {
5239             // Need to manually inject a fence so that we can update the color buffers
5240             // after the queue submit completes.
5241             // This should almost never happen.
5242             VkFenceCreateInfo fenceCreateInfo{
5243                 .sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
5244             };
5245             vk->vkCreateFence(device, &fenceCreateInfo, nullptr, &localFence);
5246         }
5247         AutoLock qlock(*ql);
5248         auto result = dispatchVkQueueSubmit(vk, queue, submitCount, pSubmits, localFence ?: fence);
5249 
5250         if (result != VK_SUCCESS) {
5251             return result;
5252         }
5253         {
5254             std::lock_guard<std::recursive_mutex> lock(mLock);
5255             // Update image layouts
5256             for (int i = 0; i < submitCount; i++) {
5257                 for (int j = 0; j < getCommandBufferCount(pSubmits[i]); j++) {
5258                     VkCommandBuffer cmdBuffer = getCommandBuffer(pSubmits[i], j);
5259                     CommandBufferInfo* cmdBufferInfo =
5260                         android::base::find(mCmdBufferInfo, cmdBuffer);
5261                     if (!cmdBufferInfo) {
5262                         continue;
5263                     }
5264                     for (const auto& ite : cmdBufferInfo->imageLayouts) {
5265                         auto imageIte = mImageInfo.find(ite.first);
5266                         if (imageIte == mImageInfo.end()) {
5267                             continue;
5268                         }
5269                         imageIte->second.layout = ite.second;
5270                     }
5271                 }
5272             }
5273             // After vkQueueSubmit is called, we can signal the condition variable
5274             // in FenceInfo, so that other threads (e.g. SyncThread) can call
5275             // waitForFence() on this fence.
5276             auto* fenceInfo = android::base::find(mFenceInfo, fence);
5277             if (fenceInfo) {
5278                 fenceInfo->state = FenceInfo::State::kWaitable;
5279                 fenceInfo->lock.lock();
5280                 fenceInfo->cv.signalAndUnlock(&fenceInfo->lock);
5281             }
5282         }
5283         if (!releasedColorBuffers.empty()) {
5284             vk->vkWaitForFences(device, 1, localFence ? &localFence : &fence, VK_TRUE,
5285                                 /* 1 sec */ 1000000000L);
5286             auto fb = FrameBuffer::getFB();
5287             if (fb) {
5288                 for (HandleType cb : releasedColorBuffers) {
5289                     fb->flushColorBufferFromVk(cb);
5290                 }
5291             }
5292         }
5293         if (localFence) {
5294             vk->vkDestroyFence(device, localFence, nullptr);
5295         }
5296 
5297         return result;
5298     }
5299 
5300     VkResult on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue boxed_queue) {
5301         auto queue = unbox_VkQueue(boxed_queue);
5302         auto vk = dispatch_VkQueue(boxed_queue);
5303 
5304         if (!queue) return VK_SUCCESS;
5305 
5306         Lock* ql;
5307         {
5308             std::lock_guard<std::recursive_mutex> lock(mLock);
5309             auto* queueInfo = android::base::find(mQueueInfo, queue);
5310             if (!queueInfo) return VK_SUCCESS;
5311             ql = queueInfo->lock;
5312         }
5313 
5314         AutoLock qlock(*ql);
5315         return vk->vkQueueWaitIdle(queue);
5316     }
5317 
5318     VkResult on_vkResetCommandBuffer(android::base::BumpPool* pool,
5319                                      VkCommandBuffer boxed_commandBuffer,
5320                                      VkCommandBufferResetFlags flags) {
5321         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5322         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5323 
5324         VkResult result = vk->vkResetCommandBuffer(commandBuffer, flags);
5325         if (VK_SUCCESS == result) {
5326             std::lock_guard<std::recursive_mutex> lock(mLock);
5327             auto& bufferInfo = mCmdBufferInfo[commandBuffer];
5328             bufferInfo.reset();
5329         }
5330         return result;
5331     }
5332 
5333     void on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice boxed_device,
5334                                  VkCommandPool commandPool, uint32_t commandBufferCount,
5335                                  const VkCommandBuffer* pCommandBuffers) {
5336         auto device = unbox_VkDevice(boxed_device);
5337         auto vk = dispatch_VkDevice(boxed_device);
5338 
5339         if (!device) return;
5340         vk->vkFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
5341         std::lock_guard<std::recursive_mutex> lock(mLock);
5342         for (uint32_t i = 0; i < commandBufferCount; i++) {
5343             const auto& cmdBufferInfoIt = mCmdBufferInfo.find(pCommandBuffers[i]);
5344             if (cmdBufferInfoIt != mCmdBufferInfo.end()) {
5345                 const auto& cmdPoolInfoIt = mCmdPoolInfo.find(cmdBufferInfoIt->second.cmdPool);
5346                 if (cmdPoolInfoIt != mCmdPoolInfo.end()) {
5347                     cmdPoolInfoIt->second.cmdBuffers.erase(pCommandBuffers[i]);
5348                 }
5349                 // Done in decoder
5350                 // delete_VkCommandBuffer(cmdBufferInfoIt->second.boxed);
5351                 mCmdBufferInfo.erase(cmdBufferInfoIt);
5352             }
5353         }
5354     }
5355 
5356     void on_vkGetPhysicalDeviceExternalSemaphoreProperties(
5357         android::base::BumpPool* pool, VkPhysicalDevice boxed_physicalDevice,
5358         const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
5359         VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
5360         auto physicalDevice = unbox_VkPhysicalDevice(boxed_physicalDevice);
5361 
5362         if (!physicalDevice) {
5363             return;
5364         }
5365         // Cannot forward this call to the driver because the NVIDIA Linux driver crashes on it.
5366         switch (pExternalSemaphoreInfo->handleType) {
5367             case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT:
5368                 pExternalSemaphoreProperties->exportFromImportedHandleTypes =
5369                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
5370                 pExternalSemaphoreProperties->compatibleHandleTypes =
5371                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
5372                 pExternalSemaphoreProperties->externalSemaphoreFeatures =
5373                     VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
5374                     VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
5375                 return;
5376             case VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT:
5377                 pExternalSemaphoreProperties->exportFromImportedHandleTypes =
5378                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
5379                 pExternalSemaphoreProperties->compatibleHandleTypes =
5380                     VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
5381                 pExternalSemaphoreProperties->externalSemaphoreFeatures =
5382                     VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
5383                     VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
5384                 return;
5385             default:
5386                 break;
5387         }
5388 
5389         pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
5390         pExternalSemaphoreProperties->compatibleHandleTypes = 0;
5391         pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
5392     }
5393 
5394     VkResult on_vkCreateDescriptorUpdateTemplate(
5395         android::base::BumpPool* pool, VkDevice boxed_device,
5396         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
5397         const VkAllocationCallbacks* pAllocator,
5398         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
5399         auto device = unbox_VkDevice(boxed_device);
5400         auto vk = dispatch_VkDevice(boxed_device);
5401 
5402         auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
5403 
5404         VkResult res =
5405             vk->vkCreateDescriptorUpdateTemplate(device, &descriptorUpdateTemplateInfo.createInfo,
5406                                                  pAllocator, pDescriptorUpdateTemplate);
5407 
5408         if (res == VK_SUCCESS) {
5409             registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
5410                                              descriptorUpdateTemplateInfo);
5411             *pDescriptorUpdateTemplate =
5412                 new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
5413         }
5414 
5415         return res;
5416     }
5417 
5418     VkResult on_vkCreateDescriptorUpdateTemplateKHR(
5419         android::base::BumpPool* pool, VkDevice boxed_device,
5420         const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
5421         const VkAllocationCallbacks* pAllocator,
5422         VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
5423         auto device = unbox_VkDevice(boxed_device);
5424         auto vk = dispatch_VkDevice(boxed_device);
5425 
5426         auto descriptorUpdateTemplateInfo = calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
5427 
5428         VkResult res = vk->vkCreateDescriptorUpdateTemplateKHR(
5429             device, &descriptorUpdateTemplateInfo.createInfo, pAllocator,
5430             pDescriptorUpdateTemplate);
5431 
5432         if (res == VK_SUCCESS) {
5433             registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate,
5434                                              descriptorUpdateTemplateInfo);
5435             *pDescriptorUpdateTemplate =
5436                 new_boxed_non_dispatchable_VkDescriptorUpdateTemplate(*pDescriptorUpdateTemplate);
5437         }
5438 
5439         return res;
5440     }
5441 
5442     void on_vkDestroyDescriptorUpdateTemplate(android::base::BumpPool* pool, VkDevice boxed_device,
5443                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
5444                                               const VkAllocationCallbacks* pAllocator) {
5445         auto device = unbox_VkDevice(boxed_device);
5446         auto vk = dispatch_VkDevice(boxed_device);
5447 
5448         vk->vkDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
5449 
5450         unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
5451     }
5452 
5453     void on_vkDestroyDescriptorUpdateTemplateKHR(
5454         android::base::BumpPool* pool, VkDevice boxed_device,
5455         VkDescriptorUpdateTemplate descriptorUpdateTemplate,
5456         const VkAllocationCallbacks* pAllocator) {
5457         auto device = unbox_VkDevice(boxed_device);
5458         auto vk = dispatch_VkDevice(boxed_device);
5459 
5460         vk->vkDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator);
5461 
5462         unregisterDescriptorUpdateTemplate(descriptorUpdateTemplate);
5463     }
5464 
5465     void on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
5466         android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
5467         VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
5468         uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
5469         const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
5470         const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
5471         const VkBufferView* pBufferViews) {
5472         auto device = unbox_VkDevice(boxed_device);
5473         auto vk = dispatch_VkDevice(boxed_device);
5474 
5475         std::lock_guard<std::recursive_mutex> lock(mLock);
5476         auto* info = android::base::find(mDescriptorUpdateTemplateInfo, descriptorUpdateTemplate);
5477         if (!info) return;
5478 
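        // The linearized template (built at template-creation time) reserves contiguous
        // regions in info->data for image infos, buffer infos, and buffer views; copy the
        // guest-supplied arrays into those slots and hand the blob to the driver.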
5479         memcpy(info->data.data() + info->imageInfoStart, pImageInfos,
5480                imageInfoCount * sizeof(VkDescriptorImageInfo));
5481         memcpy(info->data.data() + info->bufferInfoStart, pBufferInfos,
5482                bufferInfoCount * sizeof(VkDescriptorBufferInfo));
5483         memcpy(info->data.data() + info->bufferViewStart, pBufferViews,
5484                bufferViewCount * sizeof(VkBufferView));
5485 
5486         vk->vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
5487                                               info->data.data());
5488     }
5489 
5490     void on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
5491         android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
5492         VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
5493         uint32_t bufferInfoCount, uint32_t bufferViewCount, uint32_t inlineUniformBlockCount,
5494         const uint32_t* pImageInfoEntryIndices, const uint32_t* pBufferInfoEntryIndices,
5495         const uint32_t* pBufferViewEntryIndices, const VkDescriptorImageInfo* pImageInfos,
5496         const VkDescriptorBufferInfo* pBufferInfos, const VkBufferView* pBufferViews,
5497         const uint8_t* pInlineUniformBlockData) {
5498         auto device = unbox_VkDevice(boxed_device);
5499         auto vk = dispatch_VkDevice(boxed_device);
5500 
5501         std::lock_guard<std::recursive_mutex> lock(mLock);
5502         auto* info = android::base::find(mDescriptorUpdateTemplateInfo, descriptorUpdateTemplate);
5503         if (!info) return;
5504 
5505         memcpy(info->data.data() + info->imageInfoStart, pImageInfos,
5506                imageInfoCount * sizeof(VkDescriptorImageInfo));
5507         memcpy(info->data.data() + info->bufferInfoStart, pBufferInfos,
5508                bufferInfoCount * sizeof(VkDescriptorBufferInfo));
5509         memcpy(info->data.data() + info->bufferViewStart, pBufferViews,
5510                bufferViewCount * sizeof(VkBufferView));
5511         memcpy(info->data.data() + info->inlineUniformBlockStart, pInlineUniformBlockData,
5512                inlineUniformBlockCount);
5513 
5514         vk->vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate,
5515                                               info->data.data());
5516     }
5517 
5518     void hostSyncCommandBuffer(const char* tag, VkCommandBuffer boxed_commandBuffer,
5519                                uint32_t needHostSync, uint32_t sequenceNumber) {
5520         auto nextDeadline = []() {
5521             return android::base::getUnixTimeUs() + 10000;  // 10 ms
5522         };
5523 
5524         auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000;  // 5 s
5525 
5526         OrderMaintenanceInfo* order = ordmaint_VkCommandBuffer(boxed_commandBuffer);
5527         if (!order) return;
5528 
5529         AutoLock lock(order->lock);
5530 
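        // Ordering protocol: the guest numbers these host-sync packets. When needHostSync
        // is set, block until the previous packet in the sequence has been observed
        // (current sequence number + 1 == sequenceNumber), polling every 10 ms with an
        // overall 5 second timeout, then publish our own number and wake any waiters.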
5531         if (needHostSync) {
5532             while (
5533                 (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
5534                 auto waitUntilUs = nextDeadline();
5535                 order->cv.timedWait(&order->lock, waitUntilUs);
5536 
5537                 if (timeoutDeadline < android::base::getUnixTimeUs()) {
5538                     break;
5539                 }
5540             }
5541         }
5542 
5543         __atomic_store_n(&order->sequenceNumber, sequenceNumber, __ATOMIC_RELEASE);
5544         order->cv.signal();
5545         releaseOrderMaintInfo(order);
5546     }
5547 
5548     void on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
5549                                           VkCommandBuffer commandBuffer, uint32_t needHostSync,
5550                                           uint32_t sequenceNumber) {
5551         this->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
5552     }
5553 
5554     void hostSyncQueue(const char* tag, VkQueue boxed_queue, uint32_t needHostSync,
5555                        uint32_t sequenceNumber) {
5556         auto nextDeadline = []() {
5557             return android::base::getUnixTimeUs() + 10000;  // 10 ms
5558         };
5559 
5560         auto timeoutDeadline = android::base::getUnixTimeUs() + 5000000;  // 5 s
5561 
5562         OrderMaintenanceInfo* order = ordmaint_VkQueue(boxed_queue);
5563         if (!order) return;
5564 
5565         AutoLock lock(order->lock);
5566 
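        // Same ordering protocol as hostSyncCommandBuffer, keyed on the queue's
        // OrderMaintenanceInfo.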
5567         if (needHostSync) {
5568             while (
5569                 (sequenceNumber - __atomic_load_n(&order->sequenceNumber, __ATOMIC_ACQUIRE) != 1)) {
5570                 auto waitUntilUs = nextDeadline();
5571                 order->cv.timedWait(&order->lock, waitUntilUs);
5572 
5573                 if (timeoutDeadline < android::base::getUnixTimeUs()) {
5574                     break;
5575                 }
5576             }
5577         }
5578 
5579         __atomic_store_n(&order->sequenceNumber, sequenceNumber, __ATOMIC_RELEASE);
5580         order->cv.signal();
5581         releaseOrderMaintInfo(order);
5582     }
5583 
5584     void on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
5585                                   uint32_t needHostSync, uint32_t sequenceNumber) {
5586         this->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
5587     }
5588 
5589     VkResult on_vkCreateImageWithRequirementsGOOGLE(android::base::BumpPool* pool,
5590                                                     VkDevice boxed_device,
5591                                                     const VkImageCreateInfo* pCreateInfo,
5592                                                     const VkAllocationCallbacks* pAllocator,
5593                                                     VkImage* pImage,
5594                                                     VkMemoryRequirements* pMemoryRequirements) {
5595         if (pMemoryRequirements) {
5596             memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
5597         }
5598 
5599         VkResult imageCreateRes =
5600             on_vkCreateImage(pool, boxed_device, pCreateInfo, pAllocator, pImage);
5601 
5602         if (imageCreateRes != VK_SUCCESS) {
5603             return imageCreateRes;
5604         }
5605 
5606         on_vkGetImageMemoryRequirements(pool, boxed_device, unbox_VkImage(*pImage),
5607                                         pMemoryRequirements);
5608 
5609         return imageCreateRes;
5610     }
5611 
5612     VkResult on_vkCreateBufferWithRequirementsGOOGLE(android::base::BumpPool* pool,
5613                                                      VkDevice boxed_device,
5614                                                      const VkBufferCreateInfo* pCreateInfo,
5615                                                      const VkAllocationCallbacks* pAllocator,
5616                                                      VkBuffer* pBuffer,
5617                                                      VkMemoryRequirements* pMemoryRequirements) {
5618         if (pMemoryRequirements) {
5619             memset(pMemoryRequirements, 0, sizeof(*pMemoryRequirements));
5620         }
5621 
5622         VkResult bufferCreateRes =
5623             on_vkCreateBuffer(pool, boxed_device, pCreateInfo, pAllocator, pBuffer);
5624 
5625         if (bufferCreateRes != VK_SUCCESS) {
5626             return bufferCreateRes;
5627         }
5628 
5629         on_vkGetBufferMemoryRequirements(pool, boxed_device, unbox_VkBuffer(*pBuffer),
5630                                          pMemoryRequirements);
5631 
5632         return bufferCreateRes;
5633     }
5634 
5635     VkResult on_vkBeginCommandBuffer(android::base::BumpPool* pool,
5636                                      VkCommandBuffer boxed_commandBuffer,
5637                                      const VkCommandBufferBeginInfo* pBeginInfo,
5638                                      const VkDecoderContext& context) {
5639         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5640         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5641         VkResult result = vk->vkBeginCommandBuffer(commandBuffer, pBeginInfo);
5642 
5643         if (result != VK_SUCCESS) {
5644             return result;
5645         }
5646 
5647         std::lock_guard<std::recursive_mutex> lock(mLock);
5648 
5649         auto* commandBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
5650         if (!commandBufferInfo) return VK_ERROR_UNKNOWN;
5651         commandBufferInfo->reset();
5652 
5653         if (context.processName) {
5654             commandBufferInfo->debugUtilsHelper.cmdBeginDebugLabel(commandBuffer, "Process %s",
5655                                                                    context.processName);
5656         }
5657 
5658         return VK_SUCCESS;
5659     }
5660 
5661     VkResult on_vkBeginCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
5662                                                 VkCommandBuffer boxed_commandBuffer,
5663                                                 const VkCommandBufferBeginInfo* pBeginInfo,
5664                                                 const VkDecoderContext& context) {
5665         return this->on_vkBeginCommandBuffer(pool, boxed_commandBuffer, pBeginInfo, context);
5666     }
5667 
5668     VkResult on_vkEndCommandBuffer(android::base::BumpPool* pool,
5669                                    VkCommandBuffer boxed_commandBuffer,
5670                                    const VkDecoderContext& context) {
5671         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5672         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5673 
5674         std::lock_guard<std::recursive_mutex> lock(mLock);
5675 
5676         auto* commandBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
5677         if (!commandBufferInfo) return VK_ERROR_UNKNOWN;
5678 
5679         if (context.processName) {
5680             commandBufferInfo->debugUtilsHelper.cmdEndDebugLabel(commandBuffer);
5681         }
5682 
5683         return vk->vkEndCommandBuffer(commandBuffer);
5684     }
5685 
5686     void on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
5687                                           VkCommandBuffer boxed_commandBuffer,
5688                                           const VkDecoderContext& context) {
5689         on_vkEndCommandBuffer(pool, boxed_commandBuffer, context);
5690     }
5691 
5692     void on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
5693                                             VkCommandBuffer boxed_commandBuffer,
5694                                             VkCommandBufferResetFlags flags) {
5695         on_vkResetCommandBuffer(pool, boxed_commandBuffer, flags);
5696     }
5697 
5698     void on_vkCmdBindPipeline(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
5699                               VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
5700         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5701         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5702         vk->vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
5703         if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
5704             std::lock_guard<std::recursive_mutex> lock(mLock);
5705             auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
5706             if (cmdBufferInfo) {
5707                 cmdBufferInfo->computePipeline = pipeline;
5708             }
5709         }
5710     }
5711 
5712     void on_vkCmdBindDescriptorSets(android::base::BumpPool* pool,
5713                                     VkCommandBuffer boxed_commandBuffer,
5714                                     VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout,
5715                                     uint32_t firstSet, uint32_t descriptorSetCount,
5716                                     const VkDescriptorSet* pDescriptorSets,
5717                                     uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
5718         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5719         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5720         vk->vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet,
5721                                     descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
5722                                     pDynamicOffsets);
5723         if (pipelineBindPoint == VK_PIPELINE_BIND_POINT_COMPUTE) {
5724             std::lock_guard<std::recursive_mutex> lock(mLock);
5725             auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
5726             if (cmdBufferInfo) {
5727                 cmdBufferInfo->descriptorLayout = layout;
5728 
5729                 if (descriptorSetCount) {
5730                     cmdBufferInfo->firstSet = firstSet;
5731                     cmdBufferInfo->descriptorSets.assign(pDescriptorSets,
5732                                                          pDescriptorSets + descriptorSetCount);
5733                     cmdBufferInfo->dynamicOffsets.assign(pDynamicOffsets,
5734                                                          pDynamicOffsets + dynamicOffsetCount);
5735                 }
5736             }
5737         }
5738     }
5739 
5740     VkResult on_vkCreateRenderPass(android::base::BumpPool* pool, VkDevice boxed_device,
5741                                    const VkRenderPassCreateInfo* pCreateInfo,
5742                                    const VkAllocationCallbacks* pAllocator,
5743                                    VkRenderPass* pRenderPass) {
5744         auto device = unbox_VkDevice(boxed_device);
5745         auto vk = dispatch_VkDevice(boxed_device);
5746         VkRenderPassCreateInfo createInfo;
5747         bool needReformat = false;
5748         std::lock_guard<std::recursive_mutex> lock(mLock);
5749 
5750         auto* deviceInfo = android::base::find(mDeviceInfo, device);
5751         if (!deviceInfo) return VK_ERROR_OUT_OF_HOST_MEMORY;
5752         if (deviceInfo->emulateTextureEtc2 || deviceInfo->emulateTextureAstc) {
5753             for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
5754                 if (deviceInfo->needEmulatedDecompression(pCreateInfo->pAttachments[i].format)) {
5755                     needReformat = true;
5756                     break;
5757                 }
5758             }
5759         }
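        // If any attachment uses an emulated compressed format, rewrite the attachment list
        // so the render pass is created against the decompressed output format that the
        // emulation actually renders to.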
5760         std::vector<VkAttachmentDescription> attachments;
5761         if (needReformat) {
5762             createInfo = *pCreateInfo;
5763             attachments.assign(pCreateInfo->pAttachments,
5764                                pCreateInfo->pAttachments + pCreateInfo->attachmentCount);
5765             createInfo.pAttachments = attachments.data();
5766             for (auto& attachment : attachments) {
5767                 attachment.format = CompressedImageInfo::getOutputFormat(attachment.format);
5768             }
5769             pCreateInfo = &createInfo;
5770         }
5771         VkResult res = vk->vkCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
5772         if (res != VK_SUCCESS) {
5773             return res;
5774         }
5775 
5776         auto& renderPassInfo = mRenderPassInfo[*pRenderPass];
5777         renderPassInfo.device = device;
5778 
5779         *pRenderPass = new_boxed_non_dispatchable_VkRenderPass(*pRenderPass);
5780 
5781         return res;
5782     }
5783 
5784     VkResult on_vkCreateRenderPass2(android::base::BumpPool* pool, VkDevice boxed_device,
5785                                     const VkRenderPassCreateInfo2* pCreateInfo,
5786                                     const VkAllocationCallbacks* pAllocator,
5787                                     VkRenderPass* pRenderPass) {
5788         auto device = unbox_VkDevice(boxed_device);
5789         auto vk = dispatch_VkDevice(boxed_device);
5790         std::lock_guard<std::recursive_mutex> lock(mLock);
5791 
5792         VkResult res = vk->vkCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass);
5793         if (res != VK_SUCCESS) {
5794             return res;
5795         }
5796 
5797         auto& renderPassInfo = mRenderPassInfo[*pRenderPass];
5798         renderPassInfo.device = device;
5799 
5800         *pRenderPass = new_boxed_non_dispatchable_VkRenderPass(*pRenderPass);
5801 
5802         return res;
5803     }
5804 
5805     void destroyRenderPassLocked(VkDevice device, VulkanDispatch* deviceDispatch,
5806                                  VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {
5807         deviceDispatch->vkDestroyRenderPass(device, renderPass, pAllocator);
5808 
5809         mRenderPassInfo.erase(renderPass);
5810     }
5811 
5812     void on_vkDestroyRenderPass(android::base::BumpPool* pool, VkDevice boxed_device,
5813                                 VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator) {
5814         auto device = unbox_VkDevice(boxed_device);
5815         auto deviceDispatch = dispatch_VkDevice(boxed_device);
5816 
5817         std::lock_guard<std::recursive_mutex> lock(mLock);
5818         destroyRenderPassLocked(device, deviceDispatch, renderPass, pAllocator);
5819     }
5820 
5821     void registerRenderPassBeginInfo(VkCommandBuffer commandBuffer,
5822                                      const VkRenderPassBeginInfo* pRenderPassBegin) {
5823         CommandBufferInfo* cmdBufferInfo = android::base::find(mCmdBufferInfo, commandBuffer);
5824         FramebufferInfo* fbInfo =
5825             android::base::find(mFramebufferInfo, pRenderPassBegin->framebuffer);
        if (!cmdBufferInfo || !fbInfo) return;
5826         cmdBufferInfo->releasedColorBuffers.insert(fbInfo->attachedColorBuffers.begin(),
5827                                                    fbInfo->attachedColorBuffers.end());
5828     }
5829 
5830     void on_vkCmdBeginRenderPass(android::base::BumpPool* pool, VkCommandBuffer boxed_commandBuffer,
5831                                  const VkRenderPassBeginInfo* pRenderPassBegin,
5832                                  VkSubpassContents contents) {
5833         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5834         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5835         registerRenderPassBeginInfo(commandBuffer, pRenderPassBegin);
5836         vk->vkCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
5837     }
5838 
5839     void on_vkCmdBeginRenderPass2(android::base::BumpPool* pool,
5840                                   VkCommandBuffer boxed_commandBuffer,
5841                                   const VkRenderPassBeginInfo* pRenderPassBegin,
5842                                   const VkSubpassBeginInfo* pSubpassBeginInfo) {
5843         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5844         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5845         registerRenderPassBeginInfo(commandBuffer, pRenderPassBegin);
5846         vk->vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
5847     }
5848 
5849     void on_vkCmdBeginRenderPass2KHR(android::base::BumpPool* pool,
5850                                      VkCommandBuffer boxed_commandBuffer,
5851                                      const VkRenderPassBeginInfo* pRenderPassBegin,
5852                                      const VkSubpassBeginInfo* pSubpassBeginInfo) {
5853         on_vkCmdBeginRenderPass2(pool, boxed_commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
5854     }
5855 
5856     void on_vkCmdCopyQueryPoolResults(android::base::BumpPool* pool,
5857                                       VkCommandBuffer boxed_commandBuffer, VkQueryPool queryPool,
5858                                       uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer,
5859                                       VkDeviceSize dstOffset, VkDeviceSize stride,
5860                                       VkQueryResultFlags flags) {
5861         auto commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
5862         auto vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
5863         if (queryCount == 1 && stride == 0) {
5864             // Some drivers don't seem to handle stride==0 very well.
5865             // In fact, the spec does not say what should happen with stride==0.
5866             // So we just use the largest stride possible.
5867             stride = mBufferInfo[dstBuffer].size - dstOffset;
5868         }
5869         vk->vkCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer,
5870                                       dstOffset, stride, flags);
5871     }
5872 
5873     VkResult on_vkCreateFramebuffer(android::base::BumpPool* pool, VkDevice boxed_device,
5874                                     const VkFramebufferCreateInfo* pCreateInfo,
5875                                     const VkAllocationCallbacks* pAllocator,
5876                                     VkFramebuffer* pFramebuffer) {
5877         auto device = unbox_VkDevice(boxed_device);
5878         auto deviceDispatch = dispatch_VkDevice(boxed_device);
5879 
5880         VkResult result =
5881             deviceDispatch->vkCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
5882         if (result != VK_SUCCESS) {
5883             return result;
5884         }
5885 
5886         std::lock_guard<std::recursive_mutex> lock(mLock);
5887 
5888         auto& framebufferInfo = mFramebufferInfo[*pFramebuffer];
5889         framebufferInfo.device = device;
5890 
5891         if ((pCreateInfo->flags & VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT) == 0) {
5892             // b/327522469
5893             // Track the ColorBuffers that will be written to.
5894             // It might be better to check for VK_QUEUE_FAMILY_EXTERNAL in the pipeline
5895             // barrier, but the guest does not always add it to the pipeline barrier.
5896             for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
5897                 auto* imageViewInfo = android::base::find(mImageViewInfo, pCreateInfo->pAttachments[i]);
5898                 if (imageViewInfo && imageViewInfo->boundColorBuffer.has_value()) {
5899                     framebufferInfo.attachedColorBuffers.push_back(
5900                         imageViewInfo->boundColorBuffer.value());
5901                 }
5902             }
5903         }
5904 
5905         *pFramebuffer = new_boxed_non_dispatchable_VkFramebuffer(*pFramebuffer);
5906 
5907         return result;
5908     }
5909 
5910     void destroyFramebufferLocked(VkDevice device, VulkanDispatch* deviceDispatch,
5911                                   VkFramebuffer framebuffer,
5912                                   const VkAllocationCallbacks* pAllocator) {
5913         deviceDispatch->vkDestroyFramebuffer(device, framebuffer, pAllocator);
5914 
5915         mFramebufferInfo.erase(framebuffer);
5916     }
5917 
5918     void on_vkDestroyFramebuffer(android::base::BumpPool* pool, VkDevice boxed_device,
5919                                  VkFramebuffer framebuffer,
5920                                  const VkAllocationCallbacks* pAllocator) {
5921         auto device = unbox_VkDevice(boxed_device);
5922         auto deviceDispatch = dispatch_VkDevice(boxed_device);
5923 
5924         std::lock_guard<std::recursive_mutex> lock(mLock);
5925         destroyFramebufferLocked(device, deviceDispatch, framebuffer, pAllocator);
5926     }
5927 
5928     VkResult on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue boxed_queue,
5929                                   uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo,
5930                                   VkFence fence) {
5931         // If pBindInfo contains VkTimelineSemaphoreSubmitInfo, then it's
5932         // possible the host driver isn't equipped to deal with them yet.  To
5933         // work around this, send empty vkQueueSubmits before and after the
5934         // call to vkQueueBindSparse that contain the right values for
5935         // wait/signal semaphores and contains the user's
5936         // VkTimelineSemaphoreSubmitInfo structure, following the *submission
5937         // order* implied by the indices of pBindInfo.
5938 
5939         // TODO: Detect if we are running on a driver that supports timeline
5940         // semaphore signal/wait operations in vkQueueBindSparse
5941         const bool needTimelineSubmitInfoWorkaround = true;
5942         (void)needTimelineSubmitInfoWorkaround;
5943 
5944         bool hasTimelineSemaphoreSubmitInfo = false;
5945 
5946         for (uint32_t i = 0; i < bindInfoCount; ++i) {
5947             const VkTimelineSemaphoreSubmitInfoKHR* tsSi =
5948                 vk_find_struct<VkTimelineSemaphoreSubmitInfoKHR>(pBindInfo + i);
5949             if (tsSi) {
5950                 hasTimelineSemaphoreSubmitInfo = true;
5951             }
5952         }
5953 
5954         auto queue = unbox_VkQueue(boxed_queue);
5955         auto vk = dispatch_VkQueue(boxed_queue);
5956 
5957         if (!hasTimelineSemaphoreSubmitInfo) {
5958             (void)pool;
5959             return vk->vkQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
5960         } else {
5961             std::vector<VkPipelineStageFlags> waitDstStageMasks;
5962             VkTimelineSemaphoreSubmitInfoKHR currTsSi = {
5963                 VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, 0, 0, nullptr, 0, nullptr,
5964             };
5965 
5966             VkSubmitInfo currSi = {
5967                 VK_STRUCTURE_TYPE_SUBMIT_INFO,
5968                 &currTsSi,
5969                 0,
5970                 nullptr,
5971                 nullptr,
5972                 0,
5973                 nullptr,  // No commands
5974                 0,
5975                 nullptr,
5976             };
5977 
5978             VkBindSparseInfo currBi;
5979 
5980             VkResult res;
5981 
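            // Workaround sequence per bind info: (1) an empty submit carrying the wait
            // semaphores and timeline wait values, (2) the sparse bind with all
            // semaphore/timeline info stripped, (3) an empty submit carrying the signal
            // semaphores and timeline signal values, with the caller's fence attached to
            // the last iteration only.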
5982             for (uint32_t i = 0; i < bindInfoCount; ++i) {
5983                 const VkTimelineSemaphoreSubmitInfoKHR* tsSi =
5984                     vk_find_struct<VkTimelineSemaphoreSubmitInfoKHR>(pBindInfo + i);
5985                 if (!tsSi) {
5986                     res = vk->vkQueueBindSparse(queue, 1, pBindInfo + i, fence);
5987                     if (VK_SUCCESS != res) return res;
5988                     continue;
5989                 }
5990 
5991                 currTsSi.waitSemaphoreValueCount = tsSi->waitSemaphoreValueCount;
5992                 currTsSi.pWaitSemaphoreValues = tsSi->pWaitSemaphoreValues;
5993                 currTsSi.signalSemaphoreValueCount = 0;
5994                 currTsSi.pSignalSemaphoreValues = nullptr;
5995 
5996                 currSi.waitSemaphoreCount = pBindInfo[i].waitSemaphoreCount;
5997                 currSi.pWaitSemaphores = pBindInfo[i].pWaitSemaphores;
5998                 waitDstStageMasks.resize(pBindInfo[i].waitSemaphoreCount,
5999                                          VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT);
6000                 currSi.pWaitDstStageMask = waitDstStageMasks.data();
6001 
6002                 currSi.signalSemaphoreCount = 0;
6003                 currSi.pSignalSemaphores = nullptr;
6004 
6005                 res = vk->vkQueueSubmit(queue, 1, &currSi, nullptr);
6006                 if (VK_SUCCESS != res) return res;
6007 
6008                 currBi = pBindInfo[i];
6009 
6010                 vk_struct_chain_remove(tsSi, &currBi);
6011 
6012                 currBi.waitSemaphoreCount = 0;
6013                 currBi.pWaitSemaphores = nullptr;
6014                 currBi.signalSemaphoreCount = 0;
6015                 currBi.pSignalSemaphores = nullptr;
6016 
6017                 res = vk->vkQueueBindSparse(queue, 1, &currBi, nullptr);
6018                 if (VK_SUCCESS != res) return res;
6019 
6020                 currTsSi.waitSemaphoreValueCount = 0;
6021                 currTsSi.pWaitSemaphoreValues = nullptr;
6022                 currTsSi.signalSemaphoreValueCount = tsSi->signalSemaphoreValueCount;
6023                 currTsSi.pSignalSemaphoreValues = tsSi->pSignalSemaphoreValues;
6024 
6025                 currSi.waitSemaphoreCount = 0;
6026                 currSi.pWaitSemaphores = nullptr;
6027                 currSi.signalSemaphoreCount = pBindInfo[i].signalSemaphoreCount;
6028                 currSi.pSignalSemaphores = pBindInfo[i].pSignalSemaphores;
6029 
6030                 res =
6031                     vk->vkQueueSubmit(queue, 1, &currSi, i == bindInfoCount - 1 ? fence : nullptr);
6032                 if (VK_SUCCESS != res) return res;
6033             }
6034 
6035             return VK_SUCCESS;
6036         }
6037     }
6038 
6039     void on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
6040                                          VkFormat format, VkDeviceSize* pOffset,
6041                                          VkDeviceSize* pRowPitchAlignment) {
6042         if (mPerFormatLinearImageProperties.find(format) == mPerFormatLinearImageProperties.end()) {
6043             VkDeviceSize offset = 0u;
6044             VkDeviceSize rowPitchAlignment = UINT_MAX;
6045 
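            // Probe a range of widths and keep the smallest row pitch alignment seen, i.e.
            // the alignment that can be relied on for any width; the offset is assumed to
            // be the same across widths. The result is cached per format.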
6046             for (uint32_t width = 64; width <= 256; width++) {
6047                 LinearImageCreateInfo linearImageCreateInfo = {
6048                     .extent =
6049                         {
6050                             .width = width,
6051                             .height = 64,
6052                             .depth = 1,
6053                         },
6054                     .format = format,
6055                     .usage = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
6056                 };
6057 
6058                 VkDeviceSize currOffset = 0u;
6059                 VkDeviceSize currRowPitchAlignment = UINT_MAX;
6060 
6061                 VkImageCreateInfo defaultVkImageCreateInfo = linearImageCreateInfo.toDefaultVk();
6062                 on_vkGetLinearImageLayout2GOOGLE(pool, boxed_device, &defaultVkImageCreateInfo,
6063                                                  &currOffset, &currRowPitchAlignment);
6064 
6065                 offset = currOffset;
6066                 rowPitchAlignment = std::min(currRowPitchAlignment, rowPitchAlignment);
6067             }
6068             mPerFormatLinearImageProperties[format] = LinearImageProperties{
6069                 .offset = offset,
6070                 .rowPitchAlignment = rowPitchAlignment,
6071             };
6072         }
6073 
6074         if (pOffset) {
6075             *pOffset = mPerFormatLinearImageProperties[format].offset;
6076         }
6077         if (pRowPitchAlignment) {
6078             *pRowPitchAlignment = mPerFormatLinearImageProperties[format].rowPitchAlignment;
6079         }
6080     }
6081 
6082     void on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool* pool, VkDevice boxed_device,
6083                                           const VkImageCreateInfo* pCreateInfo,
6084                                           VkDeviceSize* pOffset, VkDeviceSize* pRowPitchAlignment) {
6085         LinearImageCreateInfo linearImageCreateInfo = {
6086             .extent = pCreateInfo->extent,
6087             .format = pCreateInfo->format,
6088             .usage = pCreateInfo->usage,
6089         };
6090         if (mLinearImageProperties.find(linearImageCreateInfo) == mLinearImageProperties.end()) {
6091             auto device = unbox_VkDevice(boxed_device);
6092             auto vk = dispatch_VkDevice(boxed_device);
6093 
6094             VkImageSubresource subresource = {
6095                 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
6096                 .mipLevel = 0,
6097                 .arrayLayer = 0,
6098             };
6099 
6100             VkImage image;
6101             VkSubresourceLayout subresourceLayout;
6102 
6103             VkImageCreateInfo defaultVkImageCreateInfo = linearImageCreateInfo.toDefaultVk();
6104             VkResult result = vk->vkCreateImage(device, &defaultVkImageCreateInfo, nullptr, &image);
6105             if (result != VK_SUCCESS) {
6106                 fprintf(stderr, "vkCreateImage failed. size: (%u x %u) result: %d\n",
6107                         linearImageCreateInfo.extent.width, linearImageCreateInfo.extent.height,
6108                         result);
6109                 return;
6110             }
6111             vk->vkGetImageSubresourceLayout(device, image, &subresource, &subresourceLayout);
6112             vk->vkDestroyImage(device, image, nullptr);
6113 
6114             VkDeviceSize offset = subresourceLayout.offset;
6115             uint64_t rowPitch = subresourceLayout.rowPitch;
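            // rowPitch & (~rowPitch + 1) isolates the lowest set bit of the row pitch,
            // i.e. the largest power of two that evenly divides it, which is reported as
            // the row pitch alignment.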
6116             VkDeviceSize rowPitchAlignment = rowPitch & (~rowPitch + 1);
6117 
6118             mLinearImageProperties[linearImageCreateInfo] = {
6119                 .offset = offset,
6120                 .rowPitchAlignment = rowPitchAlignment,
6121             };
6122         }
6123 
6124         if (pOffset != nullptr) {
6125             *pOffset = mLinearImageProperties[linearImageCreateInfo].offset;
6126         }
6127         if (pRowPitchAlignment != nullptr) {
6128             *pRowPitchAlignment = mLinearImageProperties[linearImageCreateInfo].rowPitchAlignment;
6129         }
6130     }
6131 
6132 #include "VkSubDecoder.cpp"
6133 
6134     void on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool, VkQueue queue,
6135                                        VkCommandBuffer boxed_commandBuffer, VkDeviceSize dataSize,
6136                                        const void* pData, const VkDecoderContext& context) {
6137         (void)queue;
6138 
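        // The guest packs a batch of command buffer commands into pData; replay them onto
        // the unboxed command buffer with the sub-decoder (see VkSubDecoder.cpp above).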
6139         VkCommandBuffer commandBuffer = unbox_VkCommandBuffer(boxed_commandBuffer);
6140         VulkanDispatch* vk = dispatch_VkCommandBuffer(boxed_commandBuffer);
6141         VulkanMemReadingStream* readStream = readstream_VkCommandBuffer(boxed_commandBuffer);
6142         subDecode(readStream, vk, boxed_commandBuffer, commandBuffer, dataSize, pData, context);
6143     }
6144 
6145     void on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(android::base::BumpPool* pool, VkQueue queue,
6146                                                     VkCommandBuffer commandBuffer,
6147                                                     VkDeviceMemory deviceMemory,
6148                                                     VkDeviceSize dataOffset, VkDeviceSize dataSize,
6149                                                     const VkDecoderContext& context) {
6150         // TODO: implement
6151     }
6152     VkDescriptorSet getOrAllocateDescriptorSetFromPoolAndId(VulkanDispatch* vk, VkDevice device,
6153                                                             VkDescriptorPool pool,
6154                                                             VkDescriptorSetLayout setLayout,
6155                                                             uint64_t poolId, uint32_t pendingAlloc,
6156                                                             bool* didAlloc) {
6157         auto* poolInfo = android::base::find(mDescriptorPoolInfo, pool);
6158         if (!poolInfo) {
6159             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
6160                 << "descriptor pool " << pool << " not found ";
6161         }
6162 
6163         DispatchableHandleInfo<uint64_t>* setHandleInfo = sBoxedHandleManager.get(poolId);
6164 
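        // Four cases: the boxed pool id may or may not already map to a host descriptor
        // set, and the guest may or may not have a pending allocation for it. Reallocate
        // when both hold, reuse when only the mapping exists, allocate fresh when only the
        // pending allocation exists, and abort otherwise.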
6165         if (setHandleInfo->underlying) {
6166             if (pendingAlloc) {
6167                 VkDescriptorSet allocedSet;
6168                 vk->vkFreeDescriptorSets(device, pool, 1,
6169                                          (VkDescriptorSet*)(&setHandleInfo->underlying));
6170                 VkDescriptorSetAllocateInfo dsAi = {
6171                     VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
6172                 };
6173                 vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
6174                 setHandleInfo->underlying = (uint64_t)allocedSet;
6175                 initDescriptorSetInfoLocked(pool, setLayout, poolId, allocedSet);
6176                 *didAlloc = true;
6177                 return allocedSet;
6178             } else {
6179                 *didAlloc = false;
6180                 return (VkDescriptorSet)(setHandleInfo->underlying);
6181             }
6182         } else {
6183             if (pendingAlloc) {
6184                 VkDescriptorSet allocedSet;
6185                 VkDescriptorSetAllocateInfo dsAi = {
6186                     VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO, 0, pool, 1, &setLayout,
6187                 };
6188                 vk->vkAllocateDescriptorSets(device, &dsAi, &allocedSet);
6189                 setHandleInfo->underlying = (uint64_t)allocedSet;
6190                 initDescriptorSetInfoLocked(pool, setLayout, poolId, allocedSet);
6191                 *didAlloc = true;
6192                 return allocedSet;
6193             } else {
6194                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
6195                     << "descriptor pool " << pool << " wanted to get set with id 0x" << std::hex
6196                     << poolId;
6197                 return nullptr;
6198             }
6199         }
6200     }
6201 
6202     void on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
6203         android::base::BumpPool* pool, VkQueue boxed_queue, uint32_t descriptorPoolCount,
6204         const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
6205         const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
6206         const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
6207         const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
6208         const VkWriteDescriptorSet* pPendingDescriptorWrites) {
6209         std::lock_guard<std::recursive_mutex> lock(mLock);
6210 
6211         VkDevice device;
6212 
6213         auto queue = unbox_VkQueue(boxed_queue);
6214         auto vk = dispatch_VkQueue(boxed_queue);
6215 
6216         auto* queueInfo = android::base::find(mQueueInfo, queue);
6217         if (queueInfo) {
6218             device = queueInfo->device;
6219         } else {
6220             GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
6221                 << "queue " << queue << "(boxed: " << boxed_queue << ") with no device registered";
6222         }
6223         on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
6224             pool, vk, device, descriptorPoolCount, pDescriptorPools, descriptorSetCount,
6225             pDescriptorSetLayouts, pDescriptorSetPoolIds, pDescriptorSetWhichPool,
6226             pDescriptorSetPendingAllocation, pDescriptorWriteStartingIndices,
6227             pendingDescriptorWriteCount, pPendingDescriptorWrites);
6228     }
6229 
6230     void on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
6231         android::base::BumpPool* pool, VulkanDispatch* vk, VkDevice device,
6232         uint32_t descriptorPoolCount, const VkDescriptorPool* pDescriptorPools,
6233         uint32_t descriptorSetCount, const VkDescriptorSetLayout* pDescriptorSetLayouts,
6234         const uint64_t* pDescriptorSetPoolIds, const uint32_t* pDescriptorSetWhichPool,
6235         const uint32_t* pDescriptorSetPendingAllocation,
6236         const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
6237         const VkWriteDescriptorSet* pPendingDescriptorWrites) {
6238         std::vector<VkDescriptorSet> setsToUpdate(descriptorSetCount, nullptr);
6239 
6240         bool didAlloc = false;
6241 
6242         for (uint32_t i = 0; i < descriptorSetCount; ++i) {
6243             uint64_t poolId = pDescriptorSetPoolIds[i];
6244             uint32_t whichPool = pDescriptorSetWhichPool[i];
6245             uint32_t pendingAlloc = pDescriptorSetPendingAllocation[i];
6246             bool didAllocThisTime;
6247             setsToUpdate[i] = getOrAllocateDescriptorSetFromPoolAndId(
6248                 vk, device, pDescriptorPools[whichPool], pDescriptorSetLayouts[i], poolId,
6249                 pendingAlloc, &didAllocThisTime);
6250 
6251             if (didAllocThisTime) didAlloc = true;
6252         }
6253 
6254         if (didAlloc) {
6255             std::vector<VkWriteDescriptorSet> writeDescriptorSetsForHostDriver(
6256                 pendingDescriptorWriteCount);
6257             memcpy(writeDescriptorSetsForHostDriver.data(), pPendingDescriptorWrites,
6258                    pendingDescriptorWriteCount * sizeof(VkWriteDescriptorSet));
6259 
6260             for (uint32_t i = 0; i < descriptorSetCount; ++i) {
6261                 uint32_t writeStartIndex = pDescriptorWriteStartingIndices[i];
6262                 uint32_t writeEndIndex;
6263                 if (i == descriptorSetCount - 1) {
6264                     writeEndIndex = pendingDescriptorWriteCount;
6265                 } else {
6266                     writeEndIndex = pDescriptorWriteStartingIndices[i + 1];
6267                 }
6268                 for (uint32_t j = writeStartIndex; j < writeEndIndex; ++j) {
6269                     writeDescriptorSetsForHostDriver[j].dstSet = setsToUpdate[i];
6270                 }
6271             }
6272             this->on_vkUpdateDescriptorSetsImpl(
6273                 pool, vk, device, (uint32_t)writeDescriptorSetsForHostDriver.size(),
6274                 writeDescriptorSetsForHostDriver.data(), 0, nullptr);
6275         } else {
6276             this->on_vkUpdateDescriptorSetsImpl(pool, vk, device, pendingDescriptorWriteCount,
6277                                                 pPendingDescriptorWrites, 0, nullptr);
6278         }
6279     }
6280 
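    // Uses the usual Vulkan two-call idiom: when pPoolIds is null only *pPoolIdCount is written,
    // and the caller is expected to call again with an array of at least that size.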
    void on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool, VkDevice device,
6282                                              VkDescriptorPool descriptorPool,
6283                                              uint32_t* pPoolIdCount, uint64_t* pPoolIds) {
6284         std::lock_guard<std::recursive_mutex> lock(mLock);
6285         auto& info = mDescriptorPoolInfo[descriptorPool];
6286         *pPoolIdCount = (uint32_t)info.poolIds.size();
6287 
6288         if (pPoolIds) {
6289             for (uint32_t i = 0; i < info.poolIds.size(); ++i) {
6290                 pPoolIds[i] = info.poolIds[i];
6291             }
6292         }
6293     }
6294 
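    // When Ycbcr conversion is emulated (emulation enabled but the host device lacks
    // samplerYcbcrConversion support), no host object is created: a fixed placeholder handle is
    // boxed and returned so the guest still receives a trackable handle, and the matching
    // destroy below becomes a no-op in that case.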
    VkResult on_vkCreateSamplerYcbcrConversion(
6296         android::base::BumpPool*, VkDevice boxed_device,
6297         const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
6298         const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {
6299         if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
6300             *pYcbcrConversion = new_boxed_non_dispatchable_VkSamplerYcbcrConversion(
6301                 (VkSamplerYcbcrConversion)((uintptr_t)0xffff0000ull));
6302             return VK_SUCCESS;
6303         }
6304         auto device = unbox_VkDevice(boxed_device);
6305         auto vk = dispatch_VkDevice(boxed_device);
6306         VkResult res =
6307             vk->vkCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
6308         if (res != VK_SUCCESS) {
6309             return res;
6310         }
6311         *pYcbcrConversion = new_boxed_non_dispatchable_VkSamplerYcbcrConversion(*pYcbcrConversion);
6312         return VK_SUCCESS;
6313     }
6314 
    void on_vkDestroySamplerYcbcrConversion(android::base::BumpPool* pool, VkDevice boxed_device,
6316                                             VkSamplerYcbcrConversion ycbcrConversion,
6317                                             const VkAllocationCallbacks* pAllocator) {
6318         if (m_emu->enableYcbcrEmulation && !m_emu->deviceInfo.supportsSamplerYcbcrConversion) {
6319             return;
6320         }
6321         auto device = unbox_VkDevice(boxed_device);
6322         auto vk = dispatch_VkDevice(boxed_device);
6323         vk->vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
6324         return;
6325     }
6326 
    void on_DeviceLost() { GFXSTREAM_ABORT(FatalError(VK_ERROR_DEVICE_LOST)); }
6328 
    void DeviceLostHandler() {}
6330 
    void on_CheckOutOfMemory(VkResult result, uint32_t opCode, const VkDecoderContext& context,
6332                              std::optional<uint64_t> allocationSize = std::nullopt) {
6333         if (result == VK_ERROR_OUT_OF_HOST_MEMORY || result == VK_ERROR_OUT_OF_DEVICE_MEMORY ||
6334             result == VK_ERROR_OUT_OF_POOL_MEMORY) {
6335             context.metricsLogger->logMetricEvent(
6336                 MetricEventVulkanOutOfMemory{.vkResultCode = result,
6337                                              .opCode = std::make_optional(opCode),
6338                                              .allocationSize = allocationSize});
6339         }
6340     }
6341 
    VkResult waitForFence(VkFence boxed_fence, uint64_t timeout) {
6343         VkFence fence;
6344         VkDevice device;
6345         VulkanDispatch* vk;
6346         StaticLock* fenceLock;
6347         ConditionVariable* cv;
6348         {
6349             std::lock_guard<std::recursive_mutex> lock(mLock);
6350 
6351             fence = unbox_VkFence(boxed_fence);
6352             if (fence == VK_NULL_HANDLE || mFenceInfo.find(fence) == mFenceInfo.end()) {
6353                 // No fence, could be a semaphore.
6354                 // TODO: Async wait for semaphores
6355                 return VK_SUCCESS;
6356             }
6357 
6358             // Vulkan specs require fences of vkQueueSubmit to be *externally
6359             // synchronized*, i.e. we cannot submit a queue while waiting for the
            // fence in another thread. Threads that call this function therefore
            // have to wait until a vkQueueSubmit() using this fence has been issued
            // before calling vkWaitForFences(). So we use a condition variable
6363             // and mutex for thread synchronization.
6364             //
6365             // See:
6366             // https://www.khronos.org/registry/vulkan/specs/1.2/html/vkspec.html#fundamentals-threadingbehavior
6367             // https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/issues/519
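            //
            // In practice, the path that performs the vkQueueSubmit() with this fence is
            // presumably what moves the fence to FenceInfo::State::kWaitable and signals the
            // condition variable; only after that transition does the wait below proceed and
            // call vkWaitForFences().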
6368 
6369             device = mFenceInfo[fence].device;
6370             vk = mFenceInfo[fence].vk;
6371             fenceLock = &mFenceInfo[fence].lock;
6372             cv = &mFenceInfo[fence].cv;
6373         }
6374 
6375         fenceLock->lock();
6376         cv->wait(fenceLock, [this, fence] {
6377             std::lock_guard<std::recursive_mutex> lock(mLock);
6378             if (mFenceInfo[fence].state == FenceInfo::State::kWaitable) {
6379                 mFenceInfo[fence].state = FenceInfo::State::kWaiting;
6380                 return true;
6381             }
6382             return false;
6383         });
6384         fenceLock->unlock();
6385 
6386         {
6387             std::lock_guard<std::recursive_mutex> lock(mLock);
6388             if (mFenceInfo.find(fence) == mFenceInfo.end()) {
6389                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
6390                     << "Fence was destroyed before vkWaitForFences call.";
6391             }
6392         }
6393 
6394         return vk->vkWaitForFences(device, /* fenceCount */ 1u, &fence,
6395                                    /* waitAll */ false, timeout);
6396     }
6397 
    VkResult getFenceStatus(VkFence boxed_fence) {
6399         VkDevice device;
6400         VkFence fence;
6401         VulkanDispatch* vk;
6402         {
6403             std::lock_guard<std::recursive_mutex> lock(mLock);
6404 
6405             fence = unbox_VkFence(boxed_fence);
6406             if (fence == VK_NULL_HANDLE || mFenceInfo.find(fence) == mFenceInfo.end()) {
6407                 // No fence, could be a semaphore.
6408                 // TODO: Async get status for semaphores
6409                 return VK_SUCCESS;
6410             }
6411 
6412             device = mFenceInfo[fence].device;
6413             vk = mFenceInfo[fence].vk;
6414         }
6415 
6416         return vk->vkGetFenceStatus(device, fence);
6417     }
6418 
    AsyncResult registerQsriCallback(VkImage boxed_image, VkQsriTimeline::Callback callback) {
6420         VkImage image;
6421         std::shared_ptr<AndroidNativeBufferInfo> anbInfo;
6422         {
6423             std::lock_guard<std::recursive_mutex> lock(mLock);
6424 
6425             image = unbox_VkImage(boxed_image);
6426 
6427             if (mLogging) {
6428                 fprintf(stderr, "%s: for boxed image 0x%llx image %p\n", __func__,
6429                         (unsigned long long)boxed_image, image);
6430             }
6431 
6432             if (image == VK_NULL_HANDLE || mImageInfo.find(image) == mImageInfo.end()) {
6433                 // No image
6434                 return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
6435             }
6436 
6437             anbInfo = mImageInfo[image].anbInfo;  // shared ptr, take ref
6438         }
6439 
6440         if (!anbInfo) {
            fprintf(stderr, "%s: warning: image %p doesn't have anb info\n", __func__, image);
6442             return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
6443         }
6444         if (!anbInfo->vk) {
6445             fprintf(stderr, "%s:%p warning: image %p anb info not initialized\n", __func__,
6446                     anbInfo.get(), image);
6447             return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
6448         }
        // The anb info may be stale or belong to a different image; verify it matches.
6450         if (image != anbInfo->image) {
6451             fprintf(stderr, "%s:%p warning: image %p anb info has wrong image: %p\n", __func__,
6452                     anbInfo.get(), image, anbInfo->image);
6453             return AsyncResult::FAIL_AND_CALLBACK_NOT_SCHEDULED;
6454         }
6455 
6456         anbInfo->qsriTimeline->registerCallbackForNextPresentAndPoll(std::move(callback));
6457 
6458         if (mLogging) {
6459             fprintf(stderr, "%s:%p Done registering\n", __func__, anbInfo.get());
6460         }
6461         return AsyncResult::OK_AND_CALLBACK_SCHEDULED;
6462     }
6463 
6464 #define GUEST_EXTERNAL_MEMORY_HANDLE_TYPES                                \
6465     (VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID | \
6466      VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA)
6467 
6468     // Transforms
6469     // If adding a new transform here, please check if it needs to be used in VkDecoderTestDispatch
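    //
    // The *_tohost transforms rewrite the external-memory handle types requested by the guest
    // into whatever the host driver actually uses, while the *_fromhost transforms restrict what
    // the host reports back to GUEST_EXTERNAL_MEMORY_HANDLE_TYPES before returning it to the
    // guest (see the transformExternalMemory* helpers for the exact mapping).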
6470 
    void transformImpl_VkExternalMemoryProperties_tohost(const VkExternalMemoryProperties* props,
6472                                                          uint32_t count) {
6473         VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
6474         for (uint32_t i = 0; i < count; ++i) {
6475             mut[i] = transformExternalMemoryProperties_tohost(mut[i]);
6476         }
6477     }
    void transformImpl_VkExternalMemoryProperties_fromhost(const VkExternalMemoryProperties* props,
6479                                                            uint32_t count) {
6480         VkExternalMemoryProperties* mut = (VkExternalMemoryProperties*)props;
6481         for (uint32_t i = 0; i < count; ++i) {
6482             mut[i] = transformExternalMemoryProperties_fromhost(mut[i],
6483                                                                 GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);
6484         }
6485     }
6486 
    void transformImpl_VkImageCreateInfo_tohost(const VkImageCreateInfo* pImageCreateInfos,
6488                                                 uint32_t count) {
6489         for (uint32_t i = 0; i < count; i++) {
6490             VkImageCreateInfo& imageCreateInfo =
6491                 const_cast<VkImageCreateInfo&>(pImageCreateInfos[i]);
6492             const VkExternalMemoryImageCreateInfo* pExternalMemoryImageCi =
6493                 vk_find_struct<VkExternalMemoryImageCreateInfo>(&imageCreateInfo);
6494             bool importAndroidHardwareBuffer =
6495                 pExternalMemoryImageCi &&
6496                 (pExternalMemoryImageCi->handleTypes &
6497                  VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID);
6498             const VkNativeBufferANDROID* pNativeBufferANDROID =
6499                 vk_find_struct<VkNativeBufferANDROID>(&imageCreateInfo);
6500 
6501             // If the VkImage is going to bind to a ColorBuffer, we have to make sure the VkImage
6502             // that backs the ColorBuffer is created with identical parameters. From the spec: If
6503             // two aliases are both images that were created with identical creation parameters,
6504             // both were created with the VK_IMAGE_CREATE_ALIAS_BIT flag set, and both are bound
6505             // identically to memory except for VkBindImageMemoryDeviceGroupInfo::pDeviceIndices and
6506             // VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions, then they interpret the
6507             // contents of the memory in consistent ways, and data written to one alias can be read
6508             // by the other alias. ... Aliases created by binding the same memory to resources in
6509             // multiple Vulkan instances or external APIs using external memory handle export and
6510             // import mechanisms interpret the contents of the memory in consistent ways, and data
6511             // written to one alias can be read by the other alias. Otherwise, the aliases interpret
6512             // the contents of the memory differently, ...
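            //
            // Practically, this means the create info used here must be reconciled with the
            // create info of the ColorBuffer's backing VkImage: below we resolve the format and
            // then merge or validate flags, usage and the remaining parameters against the
            // ColorBuffer's VkImageCreateInfo.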
6513             std::unique_ptr<VkImageCreateInfo> colorBufferVkImageCi = nullptr;
6514             std::string importSource;
6515             VkFormat resolvedFormat = VK_FORMAT_UNDEFINED;
6516             // Use UNORM formats for SRGB format requests.
6517             switch (imageCreateInfo.format) {
6518                 case VK_FORMAT_R8G8B8A8_SRGB:
6519                     resolvedFormat = VK_FORMAT_R8G8B8A8_UNORM;
6520                     break;
6521                 case VK_FORMAT_R8G8B8_SRGB:
6522                     resolvedFormat = VK_FORMAT_R8G8B8_UNORM;
6523                     break;
6524                 case VK_FORMAT_B8G8R8A8_SRGB:
6525                     resolvedFormat = VK_FORMAT_B8G8R8A8_UNORM;
6526                     break;
6527                 case VK_FORMAT_R8_SRGB:
6528                     resolvedFormat = VK_FORMAT_R8_UNORM;
6529                     break;
6530                 default:
6531                     resolvedFormat = imageCreateInfo.format;
6532             }
6533             if (importAndroidHardwareBuffer) {
6534                 // For AHardwareBufferImage binding, we can't know which ColorBuffer this
6535                 // to-be-created VkImage will bind to, so we try our best to infer the creation
6536                 // parameters.
6537                 colorBufferVkImageCi = generateColorBufferVkImageCreateInfo(
6538                     resolvedFormat, imageCreateInfo.extent.width, imageCreateInfo.extent.height,
6539                     imageCreateInfo.tiling);
6540                 importSource = "AHardwareBuffer";
6541             } else if (pNativeBufferANDROID) {
6542                 // For native buffer binding, we can query the creation parameters from handle.
6543                 uint32_t cbHandle = *static_cast<const uint32_t*>(pNativeBufferANDROID->handle);
6544                 auto colorBufferInfo = getColorBufferInfo(cbHandle);
6545                 if (colorBufferInfo.handle == cbHandle) {
6546                     colorBufferVkImageCi =
6547                         std::make_unique<VkImageCreateInfo>(colorBufferInfo.imageCreateInfoShallow);
6548                 } else {
6549                     ERR("Unknown ColorBuffer handle: %" PRIu32 ".", cbHandle);
6550                 }
6551                 importSource = "NativeBufferANDROID";
6552             }
6553             if (!colorBufferVkImageCi) {
6554                 continue;
6555             }
6556             imageCreateInfo.format = resolvedFormat;
6557             if (imageCreateInfo.flags & (~colorBufferVkImageCi->flags)) {
6558                 ERR("The VkImageCreateInfo to import %s contains unsupported VkImageCreateFlags. "
6559                     "All supported VkImageCreateFlags are %s, the input VkImageCreateInfo requires "
6560                     "support for %s.",
6561                     importSource.c_str()?:"",
6562                     string_VkImageCreateFlags(colorBufferVkImageCi->flags).c_str()?:"",
6563                     string_VkImageCreateFlags(imageCreateInfo.flags).c_str()?:"");
6564             }
6565             imageCreateInfo.flags |= colorBufferVkImageCi->flags;
6566             if (imageCreateInfo.imageType != colorBufferVkImageCi->imageType) {
6567                 ERR("The VkImageCreateInfo to import %s has an unexpected VkImageType: %s, %s "
6568                     "expected.",
6569                     importSource.c_str()?:"", string_VkImageType(imageCreateInfo.imageType),
6570                     string_VkImageType(colorBufferVkImageCi->imageType));
6571             }
6572             if (imageCreateInfo.extent.depth != colorBufferVkImageCi->extent.depth) {
6573                 ERR("The VkImageCreateInfo to import %s has an unexpected VkExtent::depth: %" PRIu32
6574                     ", %" PRIu32 " expected.",
6575                     importSource.c_str()?:"", imageCreateInfo.extent.depth,
6576                     colorBufferVkImageCi->extent.depth);
6577             }
6578             if (imageCreateInfo.mipLevels != colorBufferVkImageCi->mipLevels) {
6579                 ERR("The VkImageCreateInfo to import %s has an unexpected mipLevels: %" PRIu32
6580                     ", %" PRIu32 " expected.",
6581                     importSource.c_str()?:"", imageCreateInfo.mipLevels,
6582                     colorBufferVkImageCi->mipLevels);
6583             }
6584             if (imageCreateInfo.arrayLayers != colorBufferVkImageCi->arrayLayers) {
6585                 ERR("The VkImageCreateInfo to import %s has an unexpected arrayLayers: %" PRIu32
6586                     ", %" PRIu32 " expected.",
6587                     importSource.c_str()?:"", imageCreateInfo.arrayLayers,
6588                     colorBufferVkImageCi->arrayLayers);
6589             }
6590             if (imageCreateInfo.samples != colorBufferVkImageCi->samples) {
6591                 ERR("The VkImageCreateInfo to import %s has an unexpected VkSampleCountFlagBits: "
6592                     "%s, %s expected.",
6593                     importSource.c_str()?:"", string_VkSampleCountFlagBits(imageCreateInfo.samples),
6594                     string_VkSampleCountFlagBits(colorBufferVkImageCi->samples));
6595             }
6596             if (imageCreateInfo.usage & (~colorBufferVkImageCi->usage)) {
6597                 ERR("The VkImageCreateInfo to import %s contains unsupported VkImageUsageFlags. "
6598                     "All supported VkImageUsageFlags are %s, the input VkImageCreateInfo requires "
6599                     "support for %s.",
6600                     importSource.c_str()?:"",
6601                     string_VkImageUsageFlags(colorBufferVkImageCi->usage).c_str()?:"",
6602                     string_VkImageUsageFlags(imageCreateInfo.usage).c_str()?:"");
6603             }
6604             imageCreateInfo.usage |= colorBufferVkImageCi->usage;
6605             // For the AndroidHardwareBuffer binding case VkImageCreateInfo::sharingMode isn't
6606             // filled in generateColorBufferVkImageCreateInfo, and
6607             // VkImageCreateInfo::{format,extent::{width, height}, tiling} are guaranteed to match.
6608             if (importAndroidHardwareBuffer) {
6609                 continue;
6610             }
6611             if (resolvedFormat != colorBufferVkImageCi->format) {
6612                 ERR("The VkImageCreateInfo to import %s contains unexpected VkFormat: %s. %s "
6613                     "expected.",
6614                     importSource.c_str()?:"", string_VkFormat(imageCreateInfo.format),
6615                     string_VkFormat(colorBufferVkImageCi->format));
6616             }
6617             if (imageCreateInfo.extent.width != colorBufferVkImageCi->extent.width) {
6618                 ERR("The VkImageCreateInfo to import %s contains unexpected VkExtent::width: "
6619                     "%" PRIu32 ". %" PRIu32 " expected.",
6620                     importSource.c_str()?:"", imageCreateInfo.extent.width,
6621                     colorBufferVkImageCi->extent.width);
6622             }
6623             if (imageCreateInfo.extent.height != colorBufferVkImageCi->extent.height) {
6624                 ERR("The VkImageCreateInfo to import %s contains unexpected VkExtent::height: "
6625                     "%" PRIu32 ". %" PRIu32 " expected.",
6626                     importSource.c_str()?:"", imageCreateInfo.extent.height,
6627                     colorBufferVkImageCi->extent.height);
6628             }
6629             if (imageCreateInfo.tiling != colorBufferVkImageCi->tiling) {
6630                 ERR("The VkImageCreateInfo to import %s contains unexpected VkImageTiling: %s. %s "
6631                     "expected.",
6632                     importSource.c_str()?:"", string_VkImageTiling(imageCreateInfo.tiling),
6633                     string_VkImageTiling(colorBufferVkImageCi->tiling));
6634             }
6635             if (imageCreateInfo.sharingMode != colorBufferVkImageCi->sharingMode) {
6636                 ERR("The VkImageCreateInfo to import %s contains unexpected VkSharingMode: %s. %s "
6637                     "expected.",
6638                     importSource.c_str()?:"", string_VkSharingMode(imageCreateInfo.sharingMode),
6639                     string_VkSharingMode(colorBufferVkImageCi->sharingMode));
6640             }
6641         }
6642     }
6643 
    void transformImpl_VkImageCreateInfo_fromhost(const VkImageCreateInfo*, uint32_t) {
6645         GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER)) << "Not yet implemented.";
6646     }
6647 
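// Generates transformImpl_<type>_tohost / _fromhost pairs that rewrite a single external-memory
// handle-type field in place. For example, the VkExportMemoryAllocateInfo instantiation further
// below expands to transformImpl_VkExportMemoryAllocateInfo_tohost()/_fromhost(), each rewriting
// the handleTypes member of every element in the array.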
6648 #define DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(type, field)                                         \
6649     void transformImpl_##type##_tohost(const type* props, uint32_t count) {                        \
6650         type* mut = (type*)props;                                                                  \
6651         for (uint32_t i = 0; i < count; ++i) {                                                     \
6652             mut[i].field =                                                                         \
6653                 (VkExternalMemoryHandleTypeFlagBits)transformExternalMemoryHandleTypeFlags_tohost( \
6654                     mut[i].field);                                                                 \
6655         }                                                                                          \
6656     }                                                                                              \
6657     void transformImpl_##type##_fromhost(const type* props, uint32_t count) {                      \
6658         type* mut = (type*)props;                                                                  \
6659         for (uint32_t i = 0; i < count; ++i) {                                                     \
6660             mut[i].field = (VkExternalMemoryHandleTypeFlagBits)                                    \
6661                 transformExternalMemoryHandleTypeFlags_fromhost(                                   \
6662                     mut[i].field, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);                             \
6663         }                                                                                          \
6664     }
6665 
6666 #define DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(type)                                  \
6667     void transformImpl_##type##_tohost(const type* props, uint32_t count) {                \
6668         type* mut = (type*)props;                                                          \
6669         for (uint32_t i = 0; i < count; ++i) {                                             \
6670             mut[i].externalMemoryProperties =                                              \
6671                 transformExternalMemoryProperties_tohost(mut[i].externalMemoryProperties); \
6672         }                                                                                  \
6673     }                                                                                      \
6674     void transformImpl_##type##_fromhost(const type* props, uint32_t count) {              \
6675         type* mut = (type*)props;                                                          \
6676         for (uint32_t i = 0; i < count; ++i) {                                             \
6677             mut[i].externalMemoryProperties = transformExternalMemoryProperties_fromhost(  \
6678                 mut[i].externalMemoryProperties, GUEST_EXTERNAL_MEMORY_HANDLE_TYPES);      \
6679         }                                                                                  \
6680     }
6681 
    DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalImageFormatInfo, handleType)
6683     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkPhysicalDeviceExternalBufferInfo, handleType)
6684     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryImageCreateInfo, handleTypes)
6685     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExternalMemoryBufferCreateInfo, handleTypes)
6686     DEFINE_EXTERNAL_HANDLE_TYPE_TRANSFORM(VkExportMemoryAllocateInfo, handleTypes)
6687     DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalImageFormatProperties)
6688     DEFINE_EXTERNAL_MEMORY_PROPERTIES_TRANSFORM(VkExternalBufferProperties)
6689 
6690     uint64_t newGlobalHandle(const DispatchableHandleInfo<uint64_t>& item,
6691                              BoxedHandleTypeTag typeTag) {
6692         if (!mCreatedHandlesForSnapshotLoad.empty() &&
6693             (mCreatedHandlesForSnapshotLoad.size() - mCreatedHandlesForSnapshotLoadIndex > 0)) {
6694             auto handle = mCreatedHandlesForSnapshotLoad[mCreatedHandlesForSnapshotLoadIndex];
6695             VKDGS_LOG("use handle: 0x%lx underlying 0x%lx", handle, item.underlying);
6696             ++mCreatedHandlesForSnapshotLoadIndex;
6697             auto res = sBoxedHandleManager.addFixed(handle, item, typeTag);
6698 
6699             return res;
6700         } else {
6701             return sBoxedHandleManager.add(item, typeTag);
6702         }
6703     }
6704 
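// Generates the boxed-handle helpers for a dispatchable handle type. Illustrative usage, as a
// sketch only, with the VkDevice instantiation produced further below:
//   VkDevice boxed = new_boxed_VkDevice(underlying, vk, false /* ownDispatch */);
//   VulkanDispatch* dvk = dispatch_VkDevice(boxed);
//   VkDevice unboxed = unbox_VkDevice(boxed);
//   delete_VkDevice(boxed);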
6705 #define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL(type)                                           \
6706     type new_boxed_##type(type underlying, VulkanDispatch* dispatch, bool ownDispatch) {          \
6707         DispatchableHandleInfo<uint64_t> item;                                                    \
6708         item.underlying = (uint64_t)underlying;                                                   \
6709         item.dispatch = dispatch ? dispatch : new VulkanDispatch;                                 \
6710         item.ownDispatch = ownDispatch;                                                           \
6711         item.ordMaintInfo = new OrderMaintenanceInfo;                                             \
6712         item.readStream = nullptr;                                                                \
6713         auto res = (type)newGlobalHandle(item, Tag_##type);                                       \
6714         return res;                                                                               \
6715     }                                                                                             \
6716     void delete_##type(type boxed) {                                                              \
6717         if (!boxed) return;                                                                       \
6718         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
6719         if (!elt) return;                                                                         \
6720         releaseOrderMaintInfo(elt->ordMaintInfo);                                                 \
6721         if (elt->readStream) {                                                                    \
6722             sReadStreamRegistry.push(elt->readStream);                                            \
6723             elt->readStream = nullptr;                                                            \
6724         }                                                                                         \
6725         sBoxedHandleManager.remove((uint64_t)boxed);                                              \
6726     }                                                                                             \
6727     type unbox_##type(type boxed) {                                                               \
6728         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
6729         if (!elt) return VK_NULL_HANDLE;                                                          \
6730         return (type)elt->underlying;                                                             \
6731     }                                                                                             \
6732     OrderMaintenanceInfo* ordmaint_##type(type boxed) {                                           \
6733         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
6734         if (!elt) return 0;                                                                       \
6735         auto info = elt->ordMaintInfo;                                                            \
6736         if (!info) return 0;                                                                      \
6737         acquireOrderMaintInfo(info);                                                              \
6738         return info;                                                                              \
6739     }                                                                                             \
6740     VulkanMemReadingStream* readstream_##type(type boxed) {                                       \
6741         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
6742         if (!elt) return 0;                                                                       \
6743         auto stream = elt->readStream;                                                            \
6744         if (!stream) {                                                                            \
6745             stream = sReadStreamRegistry.pop(getFeatures());                                      \
6746             elt->readStream = stream;                                                             \
6747         }                                                                                         \
6748         return stream;                                                                            \
6749     }                                                                                             \
6750     type unboxed_to_boxed_##type(type unboxed) {                                                  \
6751         AutoLock lock(sBoxedHandleManager.lock);                                                  \
6752         return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
6753     }                                                                                             \
6754     VulkanDispatch* dispatch_##type(type boxed) {                                                 \
6755         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
6756         if (!elt) {                                                                               \
6757             fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed);                     \
6758             return nullptr;                                                                       \
6759         }                                                                                         \
6760         return elt->dispatch;                                                                     \
6761     }
6762 
6763 #define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL(type)                                       \
6764     type new_boxed_non_dispatchable_##type(type underlying) {                                     \
6765         DispatchableHandleInfo<uint64_t> item;                                                    \
6766         item.underlying = (uint64_t)underlying;                                                   \
6767         auto res = (type)newGlobalHandle(item, Tag_##type);                                       \
6768         return res;                                                                               \
6769     }                                                                                             \
6770     void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) {     \
6771         sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback);                     \
6772     }                                                                                             \
6773     void delete_##type(type boxed) { sBoxedHandleManager.remove((uint64_t)boxed); }               \
6774     void set_boxed_non_dispatchable_##type(type boxed, type underlying) {                         \
6775         DispatchableHandleInfo<uint64_t> item;                                                    \
6776         item.underlying = (uint64_t)underlying;                                                   \
6777         sBoxedHandleManager.update((uint64_t)boxed, item, Tag_##type);                          \
6778     }                                                                                             \
6779     type unboxed_to_boxed_non_dispatchable_##type(type unboxed) {                                 \
6780         AutoLock lock(sBoxedHandleManager.lock);                                                  \
6781         return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
6782     }                                                                                             \
6783     type unbox_##type(type boxed) {                                                               \
6784         auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
6785         if (!elt) {                                                                               \
6786             if constexpr (!std::is_same_v<type, VkFence>) {                                       \
6787                 GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))                                   \
6788                     << "Unbox " << boxed << " failed, not found.";                                \
6789             }                                                                                     \
6790             return VK_NULL_HANDLE;                                                                \
6791         }                                                                                         \
6792         return (type)elt->underlying;                                                             \
6793     }
6794 
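    // Instantiate the boxed-handle helpers above for every dispatchable and non-dispatchable
    // Vulkan handle type.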
6795     GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_IMPL)
    GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_IMPL)
6797 
6798     VkDecoderSnapshot* snapshot() { return &mSnapshot; }
    SnapshotState getSnapshotState() { return mSnapshotState; }
6800 
6801    private:
    bool isEmulatedInstanceExtension(const char* name) const {
6803         for (auto emulatedExt : kEmulatedInstanceExtensions) {
6804             if (!strcmp(emulatedExt, name)) return true;
6805         }
6806         return false;
6807     }
6808 
    bool isEmulatedDeviceExtension(const char* name) const {
6810         for (auto emulatedExt : kEmulatedDeviceExtensions) {
6811             if (!strcmp(emulatedExt, name)) return true;
6812         }
6813         return false;
6814     }
6815 
    bool supportEmulatedCompressedImageFormatProperty(VkFormat compressedFormat, VkImageType type,
6817                                                       VkImageTiling tiling, VkImageUsageFlags usage,
6818                                                       VkImageCreateFlags flags) {
6819         // BUG: 139193497
6820         return !(usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(type == VK_IMAGE_TYPE_1D);
6821     }
6822 
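    // Removes the device extensions we emulate (so they are never forwarded to the host driver)
    // and appends the host-side extensions the emulation relies on (external memory/semaphore,
    // sampler ycbcr conversion, swapchain, plus the platform-specific external-memory handle
    // extensions), but only when the physical device actually advertises them.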
    std::vector<const char*> filteredDeviceExtensionNames(VulkanDispatch* vk,
6824                                                           VkPhysicalDevice physicalDevice,
6825                                                           uint32_t count,
6826                                                           const char* const* extNames) {
6827         std::vector<const char*> res;
6828         std::vector<VkExtensionProperties> properties;
6829         VkResult result;
6830 
6831         for (uint32_t i = 0; i < count; ++i) {
6832             auto extName = extNames[i];
6833             if (!isEmulatedDeviceExtension(extName)) {
6834                 res.push_back(extName);
6835                 continue;
6836             }
6837         }
6838 
6839         result = enumerateDeviceExtensionProperties(vk, physicalDevice, nullptr, properties);
6840         if (result != VK_SUCCESS) {
6841             VKDGS_LOG("failed to enumerate device extensions");
6842             return res;
6843         }
6844 
6845         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME)) {
6846             res.push_back(VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME);
6847         }
6848 
6849         if (hasDeviceExtension(properties, VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME)) {
6850             res.push_back(VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME);
6851         }
6852 
6853         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME)) {
6854             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME);
6855         }
6856 
6857         if (hasDeviceExtension(properties, VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME)) {
6858             res.push_back(VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME);
6859         }
6860 
6861         if (hasDeviceExtension(properties, VK_KHR_SWAPCHAIN_EXTENSION_NAME)) {
6862             res.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
6863         }
6864 
6865 #ifdef _WIN32
6866         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME)) {
6867             res.push_back(VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME);
6868         }
6869 
6870         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME)) {
6871             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME);
6872         }
6873 #elif defined(__QNX__)
        // Note: VK_QNX_external_memory_screen_buffer is not supported in API translation,
        // decoding, etc. However, the extension name is still advertised to signal external
        // memory support to the guest.
6876         if (hasDeviceExtension(properties, VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME)) {
6877             res.push_back(VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME);
6878             // EXT_queue_family_foreign is a pre-requisite for QNX_external_memory_screen_buffer
6879             if (hasDeviceExtension(properties, VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME)) {
6880                 res.push_back(VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME);
6881             }
6882         }
6883 
6884         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
6885             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
6886         }
6887 #elif __unix__
6888         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME)) {
6889             res.push_back(VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME);
6890         }
6891 
6892         if (hasDeviceExtension(properties, VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME)) {
6893             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME);
6894         }
6895 #elif defined(__APPLE__)
6896         if (m_emu->instanceSupportsMoltenVK) {
6897             if (hasDeviceExtension(properties, VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME)) {
6898                 res.push_back(VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME);
6899             }
6900             if (hasDeviceExtension(properties, VK_EXT_METAL_OBJECTS_EXTENSION_NAME)) {
6901                 res.push_back(VK_EXT_METAL_OBJECTS_EXTENSION_NAME);
6902             }
6903         }
6904 #endif
6905 
6906 #ifdef __linux__
6907         // A dma-buf is a Linux kernel construct, commonly used with open-source DRM drivers.
6908         // See https://docs.kernel.org/driver-api/dma-buf.html for details.
6909         if (hasDeviceExtension(properties, VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME)) {
6910             res.push_back(VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME);
6911         }
6912 #endif
6913         return res;
6914     }
6915 
    std::vector<const char*> filteredInstanceExtensionNames(uint32_t count,
6917                                                             const char* const* extNames) {
6918         std::vector<const char*> res;
6919         for (uint32_t i = 0; i < count; ++i) {
6920             auto extName = extNames[i];
6921             if (!isEmulatedInstanceExtension(extName)) {
6922                 res.push_back(extName);
6923             }
6924         }
6925 
6926         if (m_emu->instanceSupportsExternalMemoryCapabilities) {
6927             res.push_back(VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME);
6928         }
6929 
6930         if (m_emu->instanceSupportsExternalSemaphoreCapabilities) {
6931             res.push_back(VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME);
6932         }
6933 
6934         if (m_emu->debugUtilsAvailableAndRequested) {
6935             res.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
6936         }
6937 
6938         if (m_emu->instanceSupportsSurface) {
6939             res.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
6940         }
6941 
6942 #if defined(__APPLE__)
6943         if (m_emu->instanceSupportsMoltenVK) {
6944             res.push_back(VK_MVK_MACOS_SURFACE_EXTENSION_NAME);
6945             res.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME);
6946         }
6947 #endif
6948 
6949         return res;
6950     }
6951 
    bool getDefaultQueueForDeviceLocked(VkDevice device, VkQueue* queue, uint32_t* queueFamilyIndex,
6953                                         Lock** queueLock) {
6954         auto* deviceInfo = android::base::find(mDeviceInfo, device);
6955         if (!deviceInfo) return false;
6956 
6957         auto zeroIt = deviceInfo->queues.find(0);
6958         if (zeroIt == deviceInfo->queues.end() || zeroIt->second.empty()) {
6959             // Get the first queue / queueFamilyIndex
6960             // that does show up.
6961             for (const auto& it : deviceInfo->queues) {
6962                 auto index = it.first;
6963                 for (auto& deviceQueue : it.second) {
6964                     *queue = deviceQueue;
6965                     *queueFamilyIndex = index;
6966                     *queueLock = mQueueInfo.at(deviceQueue).lock;
6967                     return true;
6968                 }
6969             }
6970             // Didn't find anything, fail.
6971             return false;
6972         } else {
6973             // Use queue family index 0.
6974             *queue = zeroIt->second[0];
6975             *queueFamilyIndex = 0;
6976             *queueLock = mQueueInfo.at(zeroIt->second[0]).lock;
6977             return true;
6978         }
6979 
6980         return false;
6981     }
6982 
    void updateImageMemorySizeLocked(VkDevice device, VkImage image,
6984                                      VkMemoryRequirements* pMemoryRequirements) {
        auto* deviceInfo = android::base::find(mDeviceInfo, device);
        if (!deviceInfo) return;
        if (!deviceInfo->emulateTextureEtc2 && !deviceInfo->emulateTextureAstc) {
            return;
        }
6989         auto* imageInfo = android::base::find(mImageInfo, image);
6990         if (!imageInfo) return;
6991         CompressedImageInfo& cmpInfo = imageInfo->cmpInfo;
6992         if (!deviceInfo->needEmulatedDecompression(cmpInfo)) {
6993             return;
6994         }
6995         *pMemoryRequirements = cmpInfo.getMemoryRequirements();
6996     }
6997 
6998     // Whether the VkInstance associated with this physical device was created by ANGLE
    bool isAngleInstance(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
7000         std::lock_guard<std::recursive_mutex> lock(mLock);
7001         VkInstance* instance = android::base::find(mPhysicalDeviceToInstance, physicalDevice);
7002         if (!instance) return false;
7003         InstanceInfo* instanceInfo = android::base::find(mInstanceInfo, *instance);
7004         if (!instanceInfo) return false;
7005         return instanceInfo->isAngle;
7006     }
7007 
    bool enableEmulatedEtc2(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
7009         if (!m_emu->enableEtc2Emulation) return false;
7010 
7011         // Don't enable ETC2 emulation for ANGLE, let it do its own emulation.
7012         return !isAngleInstance(physicalDevice, vk);
7013     }
7014 
    bool enableEmulatedAstc(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
7016         if (m_emu->astcLdrEmulationMode == AstcEmulationMode::Disabled) {
7017             return false;
7018         }
7019 
7020         // Don't enable ASTC emulation for ANGLE, let it do its own emulation.
7021         return !isAngleInstance(physicalDevice, vk);
7022     }
7023 
    bool needEmulatedEtc2(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
7025         if (!enableEmulatedEtc2(physicalDevice, vk)) {
7026             return false;
7027         }
7028         VkPhysicalDeviceFeatures feature;
7029         vk->vkGetPhysicalDeviceFeatures(physicalDevice, &feature);
7030         return !feature.textureCompressionETC2;
7031     }
7032 
    bool needEmulatedAstc(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
7034         if (!enableEmulatedAstc(physicalDevice, vk)) {
7035             return false;
7036         }
7037         VkPhysicalDeviceFeatures feature;
7038         vk->vkGetPhysicalDeviceFeatures(physicalDevice, &feature);
7039         return !feature.textureCompressionASTC_LDR;
7040     }
7041 
    bool supportsSwapchainMaintenance1(VkPhysicalDevice physicalDevice, VulkanDispatch* vk) {
7043         bool hasGetPhysicalDeviceFeatures2 = false;
7044         bool hasGetPhysicalDeviceFeatures2KHR = false;
7045 
7046         {
7047             std::lock_guard<std::recursive_mutex> lock(mLock);
7048 
7049             auto* physdevInfo = android::base::find(mPhysdevInfo, physicalDevice);
7050             if (!physdevInfo) {
7051                 return false;
7052             }
7053 
7054             auto instance = mPhysicalDeviceToInstance[physicalDevice];
7055             auto* instanceInfo = android::base::find(mInstanceInfo, instance);
7056             if (!instanceInfo) {
7057                 return false;
7058             }
7059 
7060             if (instanceInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0) &&
7061                 physdevInfo->props.apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
7062                 hasGetPhysicalDeviceFeatures2 = true;
7063             } else if (hasInstanceExtension(instance,
7064                                             VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME)) {
7065                 hasGetPhysicalDeviceFeatures2KHR = true;
7066             } else {
7067                 return false;
7068             }
7069         }
7070 
7071         VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT swapchainMaintenance1Features = {
7072             .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT,
7073             .pNext = nullptr,
7074             .swapchainMaintenance1 = VK_FALSE,
7075         };
7076         VkPhysicalDeviceFeatures2 features2 = {
7077             .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
7078             .pNext = &swapchainMaintenance1Features,
7079         };
7080         if (hasGetPhysicalDeviceFeatures2) {
7081             vk->vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
7082         } else if (hasGetPhysicalDeviceFeatures2KHR) {
7083             vk->vkGetPhysicalDeviceFeatures2KHR(physicalDevice, &features2);
7084         } else {
7085             return false;
7086         }
7087 
7088         return swapchainMaintenance1Features.swapchainMaintenance1 == VK_TRUE;
7089     }
7090 
    bool isEmulatedCompressedTexture(VkFormat format, VkPhysicalDevice physicalDevice,
7092                                      VulkanDispatch* vk) {
7093         return (gfxstream::vk::isEtc2(format) && needEmulatedEtc2(physicalDevice, vk)) ||
7094                (gfxstream::vk::isAstc(format) && needEmulatedAstc(physicalDevice, vk));
7095     }
7096 
7097     static const VkFormatFeatureFlags kEmulatedTextureBufferFeatureMask =
7098         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
7099         VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
7100 
7101     static const VkFormatFeatureFlags kEmulatedTextureOptimalTilingMask =
7102         VK_FORMAT_FEATURE_TRANSFER_SRC_BIT | VK_FORMAT_FEATURE_TRANSFER_DST_BIT |
7103         VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT |
7104         VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
7105 
    void maskFormatPropertiesForEmulatedTextures(VkFormatProperties* pFormatProp) {
7107         pFormatProp->linearTilingFeatures &= kEmulatedTextureBufferFeatureMask;
7108         pFormatProp->optimalTilingFeatures &= kEmulatedTextureOptimalTilingMask;
7109         pFormatProp->bufferFeatures &= kEmulatedTextureBufferFeatureMask;
7110     }
7111 
    void maskFormatPropertiesForEmulatedTextures(VkFormatProperties2* pFormatProp) {
7113         pFormatProp->formatProperties.linearTilingFeatures &= kEmulatedTextureBufferFeatureMask;
7114         pFormatProp->formatProperties.optimalTilingFeatures &= kEmulatedTextureOptimalTilingMask;
7115         pFormatProp->formatProperties.bufferFeatures &= kEmulatedTextureBufferFeatureMask;
7116     }
7117 
    void maskImageFormatPropertiesForEmulatedTextures(VkImageFormatProperties* pProperties) {
7119         // dEQP-VK.api.info.image_format_properties.2d.optimal#etc2_r8g8b8_unorm_block
7120         pProperties->sampleCounts &= VK_SAMPLE_COUNT_1_BIT;
7121     }
7122 
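    // For an emulated compressed format, query the host using the decompressed output format
    // chosen by CompressedImageInfo::getOutputFormat() and mask the result down to the feature
    // bits the emulation path can honor. As an illustrative example (assuming ETC2 emulation is
    // active), a query for an ETC2 format reports the output format's features limited to the
    // transfer/blit-src/sampled masks defined above.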
7123     template <class VkFormatProperties1or2>
    void getPhysicalDeviceFormatPropertiesCore(
7125         std::function<void(VkPhysicalDevice, VkFormat, VkFormatProperties1or2*)>
7126             getPhysicalDeviceFormatPropertiesFunc,
7127         VulkanDispatch* vk, VkPhysicalDevice physicalDevice, VkFormat format,
7128         VkFormatProperties1or2* pFormatProperties) {
7129         if (isEmulatedCompressedTexture(format, physicalDevice, vk)) {
7130             getPhysicalDeviceFormatPropertiesFunc(
7131                 physicalDevice, CompressedImageInfo::getOutputFormat(format),
7132                 pFormatProperties);
7133             maskFormatPropertiesForEmulatedTextures(pFormatProperties);
7134             return;
7135         }
7136         getPhysicalDeviceFormatPropertiesFunc(physicalDevice, format, pFormatProperties);
7137     }
7138 
    void executePreprocessRecursive(int level, VkCommandBuffer cmdBuffer) {
7140         auto* cmdBufferInfo = android::base::find(mCmdBufferInfo, cmdBuffer);
7141         if (!cmdBufferInfo) return;
7142         for (const auto& func : cmdBufferInfo->preprocessFuncs) {
7143             func();
7144         }
7145         // TODO: fix
7146         // for (const auto& subCmd : cmdBufferInfo->subCmds) {
7147         // executePreprocessRecursive(level + 1, subCmd);
7148         // }
7149     }
7150 
executePreprocessRecursive(const VkSubmitInfo & submit)7151     void executePreprocessRecursive(const VkSubmitInfo& submit) {
7152         for (uint32_t c = 0; c < submit.commandBufferCount; c++) {
7153             executePreprocessRecursive(0, submit.pCommandBuffers[c]);
7154         }
7155     }
7156 
executePreprocessRecursive(const VkSubmitInfo2 & submit)7157     void executePreprocessRecursive(const VkSubmitInfo2& submit) {
7158         for (uint32_t c = 0; c < submit.commandBufferInfoCount; c++) {
7159             executePreprocessRecursive(0, submit.pCommandBufferInfos[c].commandBuffer);
7160         }
7161     }
7162 
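    // Helpers to collect all tracked objects that belong to a given VkDevice. The second overload
    // additionally projects out one member of the info struct, e.g. (illustrative only):
    //
    //   auto buffers = findDeviceObjects(device, mBufferInfo);
    //   auto cmdBufs = findDeviceObjects(device, mCmdBufferInfo, &CommandBufferInfo::cmdPool);
    //
    // yielding a vector of handles and a vector of (handle, member) pairs respectively.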
    template <typename VkHandleToInfoMap,
              typename HandleType = typename std::decay_t<VkHandleToInfoMap>::key_type>
    std::vector<HandleType> findDeviceObjects(VkDevice device, const VkHandleToInfoMap& map) {
        std::vector<HandleType> objectsFromDevice;
        for (const auto& [objectHandle, objectInfo] : map) {
            if (objectInfo.device == device) {
                objectsFromDevice.push_back(objectHandle);
            }
        }
        return objectsFromDevice;
    }

    template <typename VkHandleToInfoMap, typename InfoMemberType,
              typename HandleType = typename std::decay_t<VkHandleToInfoMap>::key_type,
              typename InfoType = typename std::decay_t<VkHandleToInfoMap>::value_type>
    std::vector<std::pair<HandleType, InfoMemberType>> findDeviceObjects(
        VkDevice device, const VkHandleToInfoMap& map, InfoMemberType InfoType::*member) {
        std::vector<std::pair<HandleType, InfoMemberType>> objectsFromDevice;
        for (const auto& [objectHandle, objectInfo] : map) {
            if (objectInfo.device == device) {
                objectsFromDevice.emplace_back(objectHandle, objectInfo.*member);
            }
        }
        return objectsFromDevice;
    }

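    // Destroys every tracked object created from devices belonging to `instance`, roughly in
    // reverse dependency order (semaphores/samplers/buffers/images/memory first, then command
    // buffers and pools, descriptor objects, shader modules, pipelines, framebuffers and render
    // passes), and finally the devices themselves. Intended for instance-destruction paths where
    // the guest may not have destroyed these objects itself.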
    void teardownInstanceLocked(VkInstance instance) {
        std::vector<VkDevice> devicesToDestroy;
        std::vector<VulkanDispatch*> devicesToDestroyDispatches;

        for (auto it : mDeviceToPhysicalDevice) {
            auto* otherInstance = android::base::find(mPhysicalDeviceToInstance, it.second);
            if (!otherInstance) continue;

            if (instance == *otherInstance) {
                devicesToDestroy.push_back(it.first);
                devicesToDestroyDispatches.push_back(
                    dispatch_VkDevice(mDeviceInfo[it.first].boxed));
            }
        }

        for (uint32_t i = 0; i < devicesToDestroy.size(); ++i) {
            VkDevice deviceToDestroy = devicesToDestroy[i];
            VulkanDispatch* deviceToDestroyDispatch = devicesToDestroyDispatches[i];

            // https://bugs.chromium.org/p/chromium/issues/detail?id=1074600
            // it's important to idle the device before destroying it!
            deviceToDestroyDispatch->vkDeviceWaitIdle(deviceToDestroy);

            for (auto semaphore : findDeviceObjects(deviceToDestroy, mSemaphoreInfo)) {
                destroySemaphoreLocked(deviceToDestroy, deviceToDestroyDispatch, semaphore,
                                       nullptr);
            }

            for (auto sampler : findDeviceObjects(deviceToDestroy, mSamplerInfo)) {
                destroySamplerLocked(deviceToDestroy, deviceToDestroyDispatch, sampler, nullptr);
            }

            for (auto buffer : findDeviceObjects(deviceToDestroy, mBufferInfo)) {
                deviceToDestroyDispatch->vkDestroyBuffer(deviceToDestroy, buffer, nullptr);
                mBufferInfo.erase(buffer);
            }

            for (auto imageView : findDeviceObjects(deviceToDestroy, mImageViewInfo)) {
                deviceToDestroyDispatch->vkDestroyImageView(deviceToDestroy, imageView, nullptr);
                mImageViewInfo.erase(imageView);
            }

            for (auto image : findDeviceObjects(deviceToDestroy, mImageInfo)) {
                destroyImageLocked(deviceToDestroy, deviceToDestroyDispatch, image, nullptr);
            }

            for (auto memory : findDeviceObjects(deviceToDestroy, mMemoryInfo)) {
                freeMemoryLocked(deviceToDestroyDispatch, deviceToDestroy, memory, nullptr);
            }

            for (auto [commandBuffer, commandPool] :
                 findDeviceObjects(deviceToDestroy, mCmdBufferInfo, &CommandBufferInfo::cmdPool)) {
                // The command buffer is freed with the vkDestroyCommandPool() below.
                delete_VkCommandBuffer(unboxed_to_boxed_VkCommandBuffer(commandBuffer));
                mCmdBufferInfo.erase(commandBuffer);
            }

            for (auto [commandPool, commandPoolBoxed] :
                 findDeviceObjects(deviceToDestroy, mCmdPoolInfo, &CommandPoolInfo::boxed)) {
                deviceToDestroyDispatch->vkDestroyCommandPool(deviceToDestroy, commandPool,
                                                              nullptr);
                delete_VkCommandPool(commandPoolBoxed);
                mCmdPoolInfo.erase(commandPool);
            }

            for (auto [descriptorPool, descriptorPoolBoxed] : findDeviceObjects(
                     deviceToDestroy, mDescriptorPoolInfo, &DescriptorPoolInfo::boxed)) {
                cleanupDescriptorPoolAllocedSetsLocked(descriptorPool, /*isDestroy=*/true);
                deviceToDestroyDispatch->vkDestroyDescriptorPool(deviceToDestroy, descriptorPool,
                                                                 nullptr);
                delete_VkDescriptorPool(descriptorPoolBoxed);
                mDescriptorPoolInfo.erase(descriptorPool);
            }

            for (auto [descriptorSetLayout, descriptorSetLayoutBoxed] : findDeviceObjects(
                     deviceToDestroy, mDescriptorSetLayoutInfo, &DescriptorSetLayoutInfo::boxed)) {
                deviceToDestroyDispatch->vkDestroyDescriptorSetLayout(deviceToDestroy,
                                                                      descriptorSetLayout, nullptr);
                delete_VkDescriptorSetLayout(descriptorSetLayoutBoxed);
                mDescriptorSetLayoutInfo.erase(descriptorSetLayout);
            }

            for (auto shaderModule : findDeviceObjects(deviceToDestroy, mShaderModuleInfo)) {
                destroyShaderModuleLocked(deviceToDestroy, deviceToDestroyDispatch, shaderModule,
                                          nullptr);
            }

            for (auto pipeline : findDeviceObjects(deviceToDestroy, mPipelineInfo)) {
                destroyPipelineLocked(deviceToDestroy, deviceToDestroyDispatch, pipeline, nullptr);
            }

            for (auto pipelineCache : findDeviceObjects(deviceToDestroy, mPipelineCacheInfo)) {
                destroyPipelineCacheLocked(deviceToDestroy, deviceToDestroyDispatch, pipelineCache,
                                           nullptr);
            }

            for (auto framebuffer : findDeviceObjects(deviceToDestroy, mFramebufferInfo)) {
                destroyFramebufferLocked(deviceToDestroy, deviceToDestroyDispatch, framebuffer,
                                         nullptr);
            }

            for (auto renderPass : findDeviceObjects(deviceToDestroy, mRenderPassInfo)) {
                destroyRenderPassLocked(deviceToDestroy, deviceToDestroyDispatch, renderPass,
                                        nullptr);
            }
        }

        for (VkDevice deviceToDestroy : devicesToDestroy) {
            destroyDeviceLocked(deviceToDestroy, nullptr);
            mDeviceInfo.erase(deviceToDestroy);
            mDeviceToPhysicalDevice.erase(deviceToDestroy);
        }

        // TODO: Clean up the physical device info in `mPhysdevInfo` but we need to be careful
        // as the Vulkan spec does not guarantee that the VkPhysicalDevice handles returned are
        // unique per VkInstance.
    }

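    // Host-side bookkeeping for one guest command buffer: deferred preprocess callbacks to run
    // at submit time, the owning pool/device, and the most recently bound compute state so
    // emulated texture decompression can restore it afterwards (see the member comments below).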
    typedef std::function<void()> PreprocessFunc;
    struct CommandBufferInfo {
        std::vector<PreprocessFunc> preprocessFuncs = {};
        std::vector<VkCommandBuffer> subCmds = {};
        VkDevice device = VK_NULL_HANDLE;
        VkCommandPool cmdPool = VK_NULL_HANDLE;
        VkCommandBuffer boxed = VK_NULL_HANDLE;
        DebugUtilsHelper debugUtilsHelper = DebugUtilsHelper::withUtilsDisabled();

        // Most recently bound compute pipeline and descriptor sets. We save it here so that we can
        // restore it after doing emulated texture decompression.
        VkPipeline computePipeline = VK_NULL_HANDLE;
        uint32_t firstSet = 0;
        VkPipelineLayout descriptorLayout = VK_NULL_HANDLE;
        std::vector<VkDescriptorSet> descriptorSets;
        std::vector<uint32_t> dynamicOffsets;
        std::unordered_set<HandleType> acquiredColorBuffers;
        std::unordered_set<HandleType> releasedColorBuffers;
        std::unordered_map<HandleType, VkImageLayout> cbLayouts;
        std::unordered_map<VkImage, VkImageLayout> imageLayouts;

        void reset() {
            preprocessFuncs.clear();
            subCmds.clear();
            computePipeline = VK_NULL_HANDLE;
            firstSet = 0;
            descriptorLayout = VK_NULL_HANDLE;
            descriptorSets.clear();
            dynamicOffsets.clear();
            acquiredColorBuffers.clear();
            releasedColorBuffers.clear();
            cbLayouts.clear();
            imageLayouts.clear();
        }
    };

    struct CommandPoolInfo {
        VkDevice device = VK_NULL_HANDLE;
        VkCommandPool boxed = VK_NULL_HANDLE;
        std::unordered_set<VkCommandBuffer> cmdBuffers = {};
    };

    void removeCommandBufferInfo(const std::unordered_set<VkCommandBuffer>& cmdBuffers) {
        for (const auto& cmdBuffer : cmdBuffers) {
            mCmdBufferInfo.erase(cmdBuffer);
        }
    }

    bool isDescriptorTypeImageInfo(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
               (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
               (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
               (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
    }

    bool descriptorTypeContainsImage(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
               (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
               (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
    }

    bool descriptorTypeContainsSampler(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
               (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
    }

    bool isDescriptorTypeBufferInfo(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
               (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
    }

    bool isDescriptorTypeBufferView(VkDescriptorType descType) {
        return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
               (descType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
    }

    bool isDescriptorTypeInlineUniformBlock(VkDescriptorType descType) {
        return descType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT;
    }

    bool isDescriptorTypeAccelerationStructure(VkDescriptorType descType) {
        return descType == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR;
    }

    int descriptorDependencyObjectCount(VkDescriptorType descType) {
        switch (descType) {
            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
                return 2;
            case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            case VK_DESCRIPTOR_TYPE_SAMPLER:
            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
            case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
                return 1;
            default:
                return 0;
        }
    }

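    // Host-side copy of a descriptor update template. The guest's template entries are rewritten
    // ("linearized") so that all VkDescriptorImageInfo, VkDescriptorBufferInfo, VkBufferView and
    // inline uniform block payloads land back-to-back in one preallocated `data` buffer, laid out
    // roughly as:
    //
    //   [ image infos | buffer infos | buffer views | inline uniform bytes ]
    //     ^imageInfoStart ^bufferInfoStart ^bufferViewStart ^inlineUniformBlockStart
    //
    // presumably matching how the paired guest-side encoder packs the update data.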
    struct DescriptorUpdateTemplateInfo {
        VkDescriptorUpdateTemplateCreateInfo createInfo;
        std::vector<VkDescriptorUpdateTemplateEntry> linearizedTemplateEntries;
        // Preallocated pData
        std::vector<uint8_t> data;
        size_t imageInfoStart;
        size_t bufferInfoStart;
        size_t bufferViewStart;
        size_t inlineUniformBlockStart;
    };

    DescriptorUpdateTemplateInfo calcLinearizedDescriptorUpdateTemplateInfo(
        const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo) {
        DescriptorUpdateTemplateInfo res;
        res.createInfo = *pCreateInfo;

        size_t numImageInfos = 0;
        size_t numBufferInfos = 0;
        size_t numBufferViews = 0;
        size_t numInlineUniformBlocks = 0;

        for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
            const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
            auto type = entry.descriptorType;
            auto count = entry.descriptorCount;
            if (isDescriptorTypeImageInfo(type)) {
                numImageInfos += count;
            } else if (isDescriptorTypeBufferInfo(type)) {
                numBufferInfos += count;
            } else if (isDescriptorTypeBufferView(type)) {
                numBufferViews += count;
            } else if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
                numInlineUniformBlocks += count;
            } else {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "unknown descriptor type 0x" << std::hex << type;
            }
        }

        size_t imageInfoBytes = numImageInfos * sizeof(VkDescriptorImageInfo);
        size_t bufferInfoBytes = numBufferInfos * sizeof(VkDescriptorBufferInfo);
        size_t bufferViewBytes = numBufferViews * sizeof(VkBufferView);
        size_t inlineUniformBlockBytes = numInlineUniformBlocks;

        res.data.resize(imageInfoBytes + bufferInfoBytes + bufferViewBytes +
                        inlineUniformBlockBytes);
        res.imageInfoStart = 0;
        res.bufferInfoStart = imageInfoBytes;
        res.bufferViewStart = imageInfoBytes + bufferInfoBytes;
        res.inlineUniformBlockStart = imageInfoBytes + bufferInfoBytes + bufferViewBytes;

        size_t imageInfoCount = 0;
        size_t bufferInfoCount = 0;
        size_t bufferViewCount = 0;
        size_t inlineUniformBlockCount = 0;

        for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
            const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
            VkDescriptorUpdateTemplateEntry entryForHost = entry;

            auto type = entry.descriptorType;

            if (isDescriptorTypeImageInfo(type)) {
                entryForHost.offset =
                    res.imageInfoStart + imageInfoCount * sizeof(VkDescriptorImageInfo);
                entryForHost.stride = sizeof(VkDescriptorImageInfo);
                ++imageInfoCount;
            } else if (isDescriptorTypeBufferInfo(type)) {
                entryForHost.offset =
                    res.bufferInfoStart + bufferInfoCount * sizeof(VkDescriptorBufferInfo);
                entryForHost.stride = sizeof(VkDescriptorBufferInfo);
                ++bufferInfoCount;
            } else if (isDescriptorTypeBufferView(type)) {
                entryForHost.offset = res.bufferViewStart + bufferViewCount * sizeof(VkBufferView);
                entryForHost.stride = sizeof(VkBufferView);
                ++bufferViewCount;
            } else if (type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
                entryForHost.offset = res.inlineUniformBlockStart + inlineUniformBlockCount;
                entryForHost.stride = 0;
                inlineUniformBlockCount += entryForHost.descriptorCount;
            } else {
                GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))
                    << "unknown descriptor type 0x" << std::hex << type;
            }

            res.linearizedTemplateEntries.push_back(entryForHost);
        }

        res.createInfo.pDescriptorUpdateEntries = res.linearizedTemplateEntries.data();

        return res;
    }

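    // Typical flow (illustrative only): on vkCreateDescriptorUpdateTemplate the decoder would do
    //
    //   DescriptorUpdateTemplateInfo info =
    //       calcLinearizedDescriptorUpdateTemplateInfo(pCreateInfo);
    //   registerDescriptorUpdateTemplate(*pDescriptorUpdateTemplate, info);
    //
    // and call unregisterDescriptorUpdateTemplate() on the matching destroy.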
    void registerDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                          const DescriptorUpdateTemplateInfo& info) {
        std::lock_guard<std::recursive_mutex> lock(mLock);
        mDescriptorUpdateTemplateInfo[descriptorUpdateTemplate] = info;
    }

    void unregisterDescriptorUpdateTemplate(VkDescriptorUpdateTemplate descriptorUpdateTemplate) {
        std::lock_guard<std::recursive_mutex> lock(mLock);
        mDescriptorUpdateTemplateInfo.erase(descriptorUpdateTemplate);
    }

    // Returns the VkInstance associated with a VkDevice, or null if it's not found
    VkInstance* deviceToInstanceLocked(VkDevice device) {
        auto* physicalDevice = android::base::find(mDeviceToPhysicalDevice, device);
        if (!physicalDevice) return nullptr;
        return android::base::find(mPhysicalDeviceToInstance, *physicalDevice);
    }

    VulkanDispatch* m_vk;
    VkEmulation* m_emu;
    emugl::RenderDocWithMultipleVkInstances* mRenderDocWithMultipleVkInstances = nullptr;
    bool mSnapshotsEnabled = false;
    bool mVkCleanupEnabled = true;
    bool mLogging = false;
    bool mVerbosePrints = false;
    bool mUseOldMemoryCleanupPath = false;
    bool mGuestUsesAngle = false;

    std::recursive_mutex mLock;

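    // Descriptor pool accounting. Each DescriptorPoolInfo::PoolState models one
    // VkDescriptorPoolSize bucket (a descriptor type plus a capacity); the helpers below check
    // and apply per-binding allocations against those buckets so the decoder can report
    // VK_ERROR_OUT_OF_POOL_MEMORY itself rather than relying on the host driver's behavior.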
    bool isBindingFeasibleForAlloc(const DescriptorPoolInfo::PoolState& poolState,
                                   const VkDescriptorSetLayoutBinding& binding) {
        if (binding.descriptorCount && (poolState.type != binding.descriptorType)) {
            return false;
        }

        uint32_t availDescriptorCount = poolState.descriptorCount - poolState.used;

        if (availDescriptorCount < binding.descriptorCount) {
            return false;
        }

        return true;
    }

    bool isBindingFeasibleForFree(const DescriptorPoolInfo::PoolState& poolState,
                                  const VkDescriptorSetLayoutBinding& binding) {
        if (poolState.type != binding.descriptorType) return false;
        if (poolState.used < binding.descriptorCount) return false;
        return true;
    }

    void allocBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
                              DescriptorPoolInfo::PoolState& poolState) {
        poolState.used += binding.descriptorCount;
    }

    void freeBindingFeasible(const VkDescriptorSetLayoutBinding& binding,
                             DescriptorPoolInfo::PoolState& poolState) {
        poolState.used -= binding.descriptorCount;
    }

    VkResult validateDescriptorSetAllocLocked(const VkDescriptorSetAllocateInfo* pAllocateInfo) {
        auto* poolInfo = android::base::find(mDescriptorPoolInfo, pAllocateInfo->descriptorPool);
        if (!poolInfo) return VK_ERROR_INITIALIZATION_FAILED;

        // Check the number of sets available.
        auto setsAvailable = poolInfo->maxSets - poolInfo->usedSets;

        if (setsAvailable < pAllocateInfo->descriptorSetCount) {
            return VK_ERROR_OUT_OF_POOL_MEMORY;
        }

        // Perform simulated allocation and error out with
        // VK_ERROR_OUT_OF_POOL_MEMORY if it fails.
        std::vector<DescriptorPoolInfo::PoolState> poolCopy = poolInfo->pools;

        for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; ++i) {
            auto setLayoutInfo =
                android::base::find(mDescriptorSetLayoutInfo, pAllocateInfo->pSetLayouts[i]);
            if (!setLayoutInfo) return VK_ERROR_INITIALIZATION_FAILED;

            for (const auto& binding : setLayoutInfo->bindings) {
                bool success = false;
                for (auto& pool : poolCopy) {
                    if (!isBindingFeasibleForAlloc(pool, binding)) continue;

                    success = true;
                    allocBindingFeasible(binding, pool);
                    break;
                }

                if (!success) {
                    return VK_ERROR_OUT_OF_POOL_MEMORY;
                }
            }
        }
        return VK_SUCCESS;
    }

    void applyDescriptorSetAllocationLocked(
        DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
        ++poolInfo.usedSets;
        for (const auto& binding : bindings) {
            for (auto& pool : poolInfo.pools) {
                if (!isBindingFeasibleForAlloc(pool, binding)) continue;
                allocBindingFeasible(binding, pool);
                break;
            }
        }
    }

    void removeDescriptorSetAllocationLocked(
        DescriptorPoolInfo& poolInfo, const std::vector<VkDescriptorSetLayoutBinding>& bindings) {
        --poolInfo.usedSets;
        for (const auto& binding : bindings) {
            for (auto& pool : poolInfo.pools) {
                if (!isBindingFeasibleForFree(pool, binding)) continue;
                freeBindingFeasible(binding, pool);
                break;
            }
        }
    }

    template <class T>
    class NonDispatchableHandleInfo {
       public:
        T underlying;
    };

    std::unordered_map<VkInstance, InstanceInfo> mInstanceInfo;
    std::unordered_map<VkPhysicalDevice, PhysicalDeviceInfo> mPhysdevInfo;
    std::unordered_map<VkDevice, DeviceInfo> mDeviceInfo;
    std::unordered_map<VkImage, ImageInfo> mImageInfo;
    std::unordered_map<VkImageView, ImageViewInfo> mImageViewInfo;
    std::unordered_map<VkSampler, SamplerInfo> mSamplerInfo;
    std::unordered_map<VkCommandBuffer, CommandBufferInfo> mCmdBufferInfo;
    std::unordered_map<VkCommandPool, CommandPoolInfo> mCmdPoolInfo;
    // TODO: release CommandBufferInfo when a command pool is reset/released

    // Back-references: the physical device associated with a particular
    // VkDevice, and the instance associated with a particular VkPhysicalDevice.
    std::unordered_map<VkDevice, VkPhysicalDevice> mDeviceToPhysicalDevice;
    std::unordered_map<VkPhysicalDevice, VkInstance> mPhysicalDeviceToInstance;

    std::unordered_map<VkQueue, QueueInfo> mQueueInfo;
    std::unordered_map<VkBuffer, BufferInfo> mBufferInfo;

    std::unordered_map<VkDeviceMemory, MemoryInfo> mMemoryInfo;

    std::unordered_map<VkShaderModule, ShaderModuleInfo> mShaderModuleInfo;
    std::unordered_map<VkPipelineCache, PipelineCacheInfo> mPipelineCacheInfo;
    std::unordered_map<VkPipeline, PipelineInfo> mPipelineInfo;
    std::unordered_map<VkRenderPass, RenderPassInfo> mRenderPassInfo;
    std::unordered_map<VkFramebuffer, FramebufferInfo> mFramebufferInfo;

    std::unordered_map<VkSemaphore, SemaphoreInfo> mSemaphoreInfo;
    std::unordered_map<VkFence, FenceInfo> mFenceInfo;

    std::unordered_map<VkDescriptorSetLayout, DescriptorSetLayoutInfo> mDescriptorSetLayoutInfo;
    std::unordered_map<VkDescriptorPool, DescriptorPoolInfo> mDescriptorPoolInfo;
    std::unordered_map<VkDescriptorSet, DescriptorSetInfo> mDescriptorSetInfo;

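    // On Windows, exported semaphores are tracked by a small host-generated integer id
    // (mExternalSemaphoresById); genSemaphoreId() hands out those ids, resetting to 1 so that the
    // -1 sentinel is never returned if the counter wraps.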
#ifdef _WIN32
    int mSemaphoreId = 1;
    int genSemaphoreId() {
        if (mSemaphoreId == -1) {
            mSemaphoreId = 1;
        }
        int res = mSemaphoreId;
        ++mSemaphoreId;
        return res;
    }
    std::unordered_map<int, VkSemaphore> mExternalSemaphoresById;
#endif
    std::unordered_map<VkDescriptorUpdateTemplate, DescriptorUpdateTemplateInfo>
        mDescriptorUpdateTemplateInfo;

    VkDecoderSnapshot mSnapshot;

    std::vector<uint64_t> mCreatedHandlesForSnapshotLoad;
    size_t mCreatedHandlesForSnapshotLoadIndex = 0;

    Lock mOccupiedGpasLock;
    // Back-reference to the VkDeviceMemory that is occupying a particular
    // guest physical address
    struct OccupiedGpaInfo {
        VulkanDispatch* vk;
        VkDevice device;
        VkDeviceMemory memory;
        uint64_t gpa;
        size_t sizeToPage;
    };
    std::unordered_map<uint64_t, OccupiedGpaInfo> mOccupiedGpas;

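    // Key type for caching linear-image layout queries: two create infos that agree on extent,
    // format and usage share the same offset/rowPitchAlignment entry. Hash{} folds the fields
    // together boost::hash_combine-style, and toDefaultVk() expands the key back into the
    // canonical single-mip, linear-tiling VkImageCreateInfo that the layout query is presumably
    // issued with.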
    struct LinearImageCreateInfo {
        VkExtent3D extent;
        VkFormat format;
        VkImageUsageFlags usage;

        VkImageCreateInfo toDefaultVk() const {
            return VkImageCreateInfo{
                .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
                .pNext = nullptr,
                .flags = {},
                .imageType = VK_IMAGE_TYPE_2D,
                .format = format,
                .extent = extent,
                .mipLevels = 1,
                .arrayLayers = 1,
                .samples = VK_SAMPLE_COUNT_1_BIT,
                .tiling = VK_IMAGE_TILING_LINEAR,
                .usage = usage,
                .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
                .queueFamilyIndexCount = 0,
                .pQueueFamilyIndices = nullptr,
                .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
            };
        }

        struct Hash {
            std::size_t operator()(const LinearImageCreateInfo& ci) const {
                std::size_t s = 0;
                // Magic number used in boost::hash_combine().
                constexpr size_t kHashMagic = 0x9e3779b9;
                s ^= std::hash<uint32_t>{}(ci.extent.width) + kHashMagic + (s << 6) + (s >> 2);
                s ^= std::hash<uint32_t>{}(ci.extent.height) + kHashMagic + (s << 6) + (s >> 2);
                s ^= std::hash<uint32_t>{}(ci.extent.depth) + kHashMagic + (s << 6) + (s >> 2);
                s ^= std::hash<VkFormat>{}(ci.format) + kHashMagic + (s << 6) + (s >> 2);
                s ^= std::hash<VkImageUsageFlags>{}(ci.usage) + kHashMagic + (s << 6) + (s >> 2);
                return s;
            }
        };
    };

    friend bool operator==(const LinearImageCreateInfo& a, const LinearImageCreateInfo& b) {
        return a.extent.width == b.extent.width && a.extent.height == b.extent.height &&
               a.extent.depth == b.extent.depth && a.format == b.format && a.usage == b.usage;
    }

    struct LinearImageProperties {
        VkDeviceSize offset;
        VkDeviceSize rowPitchAlignment;
    };

    // TODO(liyl): Remove after removing the old vkGetLinearImageLayoutGOOGLE.
    std::unordered_map<VkFormat, LinearImageProperties> mPerFormatLinearImageProperties;

    std::unordered_map<LinearImageCreateInfo, LinearImageProperties, LinearImageCreateInfo::Hash>
        mLinearImageProperties;

    SnapshotState mSnapshotState = SnapshotState::Normal;
};

VkDecoderGlobalState::VkDecoderGlobalState() : mImpl(new VkDecoderGlobalState::Impl()) {}

VkDecoderGlobalState::~VkDecoderGlobalState() = default;

static VkDecoderGlobalState* sGlobalDecoderState = nullptr;

// static
VkDecoderGlobalState* VkDecoderGlobalState::get() {
    if (sGlobalDecoderState) return sGlobalDecoderState;
    sGlobalDecoderState = new VkDecoderGlobalState;
    return sGlobalDecoderState;
}

// static
void VkDecoderGlobalState::reset() {
    delete sGlobalDecoderState;
    sGlobalDecoderState = nullptr;
}

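// Everything below is the public VkDecoderGlobalState surface; each method simply forwards to
// the corresponding method on the pimpl (mImpl).
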
// Snapshots
bool VkDecoderGlobalState::snapshotsEnabled() const { return mImpl->snapshotsEnabled(); }

VkDecoderGlobalState::SnapshotState VkDecoderGlobalState::getSnapshotState() const {
    return mImpl->getSnapshotState();
}

const gfxstream::host::FeatureSet& VkDecoderGlobalState::getFeatures() const { return mImpl->getFeatures(); }

bool VkDecoderGlobalState::vkCleanupEnabled() const { return mImpl->vkCleanupEnabled(); }

void VkDecoderGlobalState::save(android::base::Stream* stream) { mImpl->save(stream); }

void VkDecoderGlobalState::load(android::base::Stream* stream, GfxApiLogger& gfxLogger,
                                HealthMonitor<>* healthMonitor) {
    mImpl->load(stream, gfxLogger, healthMonitor);
}

void VkDecoderGlobalState::lock() { mImpl->lock(); }

void VkDecoderGlobalState::unlock() { mImpl->unlock(); }

size_t VkDecoderGlobalState::setCreatedHandlesForSnapshotLoad(const unsigned char* buffer) {
    return mImpl->setCreatedHandlesForSnapshotLoad(buffer);
}

void VkDecoderGlobalState::clearCreatedHandlesForSnapshotLoad() {
    mImpl->clearCreatedHandlesForSnapshotLoad();
}

VkResult VkDecoderGlobalState::on_vkEnumerateInstanceVersion(android::base::BumpPool* pool,
                                                             uint32_t* pApiVersion) {
    return mImpl->on_vkEnumerateInstanceVersion(pool, pApiVersion);
}

VkResult VkDecoderGlobalState::on_vkCreateInstance(android::base::BumpPool* pool,
                                                   const VkInstanceCreateInfo* pCreateInfo,
                                                   const VkAllocationCallbacks* pAllocator,
                                                   VkInstance* pInstance) {
    return mImpl->on_vkCreateInstance(pool, pCreateInfo, pAllocator, pInstance);
}

void VkDecoderGlobalState::on_vkDestroyInstance(android::base::BumpPool* pool, VkInstance instance,
                                                const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroyInstance(pool, instance, pAllocator);
}

VkResult VkDecoderGlobalState::on_vkEnumeratePhysicalDevices(android::base::BumpPool* pool,
                                                             VkInstance instance,
                                                             uint32_t* physicalDeviceCount,
                                                             VkPhysicalDevice* physicalDevices) {
    return mImpl->on_vkEnumeratePhysicalDevices(pool, instance, physicalDeviceCount,
                                                physicalDevices);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures(android::base::BumpPool* pool,
                                                          VkPhysicalDevice physicalDevice,
                                                          VkPhysicalDeviceFeatures* pFeatures) {
    mImpl->on_vkGetPhysicalDeviceFeatures(pool, physicalDevice, pFeatures);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2(android::base::BumpPool* pool,
                                                           VkPhysicalDevice physicalDevice,
                                                           VkPhysicalDeviceFeatures2* pFeatures) {
    mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFeatures2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceFeatures2KHR* pFeatures) {
    mImpl->on_vkGetPhysicalDeviceFeatures2(pool, physicalDevice, pFeatures);
}

VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
    VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags,
    VkImageFormatProperties* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties(
        pool, physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
}
VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
    VkImageFormatProperties2* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
        pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
}
VkResult VkDecoderGlobalState::on_vkGetPhysicalDeviceImageFormatProperties2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
    VkImageFormatProperties2* pImageFormatProperties) {
    return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
        pool, physicalDevice, pImageFormatInfo, pImageFormatProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
    VkFormatProperties* pFormatProperties) {
    mImpl->on_vkGetPhysicalDeviceFormatProperties(pool, physicalDevice, format, pFormatProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
    VkFormatProperties2* pFormatProperties) {
    mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceFormatProperties2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, VkFormat format,
    VkFormatProperties2* pFormatProperties) {
    mImpl->on_vkGetPhysicalDeviceFormatProperties2(pool, physicalDevice, format, pFormatProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceProperties* pProperties) {
    mImpl->on_vkGetPhysicalDeviceProperties(pool, physicalDevice, pProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceProperties2* pProperties) {
    mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceProperties2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceProperties2* pProperties) {
    mImpl->on_vkGetPhysicalDeviceProperties2(pool, physicalDevice, pProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties(pool, physicalDevice, pMemoryProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
}

void VkDecoderGlobalState::on_vkGetPhysicalDeviceMemoryProperties2KHR(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties2(pool, physicalDevice, pMemoryProperties);
}

VkResult VkDecoderGlobalState::on_vkEnumerateDeviceExtensionProperties(
    android::base::BumpPool* pool, VkPhysicalDevice physicalDevice, const char* pLayerName,
    uint32_t* pPropertyCount, VkExtensionProperties* pProperties) {
    return mImpl->on_vkEnumerateDeviceExtensionProperties(pool, physicalDevice, pLayerName,
                                                          pPropertyCount, pProperties);
}

on_vkCreateDevice(android::base::BumpPool * pool,VkPhysicalDevice physicalDevice,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice)7937 VkResult VkDecoderGlobalState::on_vkCreateDevice(android::base::BumpPool* pool,
7938                                                  VkPhysicalDevice physicalDevice,
7939                                                  const VkDeviceCreateInfo* pCreateInfo,
7940                                                  const VkAllocationCallbacks* pAllocator,
7941                                                  VkDevice* pDevice) {
7942     return mImpl->on_vkCreateDevice(pool, physicalDevice, pCreateInfo, pAllocator, pDevice);
7943 }
7944 
on_vkGetDeviceQueue(android::base::BumpPool * pool,VkDevice device,uint32_t queueFamilyIndex,uint32_t queueIndex,VkQueue * pQueue)7945 void VkDecoderGlobalState::on_vkGetDeviceQueue(android::base::BumpPool* pool, VkDevice device,
7946                                                uint32_t queueFamilyIndex, uint32_t queueIndex,
7947                                                VkQueue* pQueue) {
7948     mImpl->on_vkGetDeviceQueue(pool, device, queueFamilyIndex, queueIndex, pQueue);
7949 }
7950 
on_vkGetDeviceQueue2(android::base::BumpPool * pool,VkDevice device,const VkDeviceQueueInfo2 * pQueueInfo,VkQueue * pQueue)7951 void VkDecoderGlobalState::on_vkGetDeviceQueue2(android::base::BumpPool* pool, VkDevice device,
7952                                                 const VkDeviceQueueInfo2* pQueueInfo,
7953                                                 VkQueue* pQueue) {
7954     mImpl->on_vkGetDeviceQueue2(pool, device, pQueueInfo, pQueue);
7955 }
7956 
on_vkDestroyDevice(android::base::BumpPool * pool,VkDevice device,const VkAllocationCallbacks * pAllocator)7957 void VkDecoderGlobalState::on_vkDestroyDevice(android::base::BumpPool* pool, VkDevice device,
7958                                               const VkAllocationCallbacks* pAllocator) {
7959     mImpl->on_vkDestroyDevice(pool, device, pAllocator);
7960 }
7961 
on_vkCreateBuffer(android::base::BumpPool * pool,VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)7962 VkResult VkDecoderGlobalState::on_vkCreateBuffer(android::base::BumpPool* pool, VkDevice device,
7963                                                  const VkBufferCreateInfo* pCreateInfo,
7964                                                  const VkAllocationCallbacks* pAllocator,
7965                                                  VkBuffer* pBuffer) {
7966     return mImpl->on_vkCreateBuffer(pool, device, pCreateInfo, pAllocator, pBuffer);
7967 }
7968 
on_vkDestroyBuffer(android::base::BumpPool * pool,VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)7969 void VkDecoderGlobalState::on_vkDestroyBuffer(android::base::BumpPool* pool, VkDevice device,
7970                                               VkBuffer buffer,
7971                                               const VkAllocationCallbacks* pAllocator) {
7972     mImpl->on_vkDestroyBuffer(pool, device, buffer, pAllocator);
7973 }
7974 
on_vkBindBufferMemory(android::base::BumpPool * pool,VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)7975 VkResult VkDecoderGlobalState::on_vkBindBufferMemory(android::base::BumpPool* pool, VkDevice device,
7976                                                      VkBuffer buffer, VkDeviceMemory memory,
7977                                                      VkDeviceSize memoryOffset) {
7978     return mImpl->on_vkBindBufferMemory(pool, device, buffer, memory, memoryOffset);
7979 }
7980 
on_vkBindBufferMemory2(android::base::BumpPool * pool,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)7981 VkResult VkDecoderGlobalState::on_vkBindBufferMemory2(android::base::BumpPool* pool,
7982                                                       VkDevice device, uint32_t bindInfoCount,
7983                                                       const VkBindBufferMemoryInfo* pBindInfos) {
7984     return mImpl->on_vkBindBufferMemory2(pool, device, bindInfoCount, pBindInfos);
7985 }
7986 
on_vkBindBufferMemory2KHR(android::base::BumpPool * pool,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)7987 VkResult VkDecoderGlobalState::on_vkBindBufferMemory2KHR(android::base::BumpPool* pool,
7988                                                          VkDevice device, uint32_t bindInfoCount,
7989                                                          const VkBindBufferMemoryInfo* pBindInfos) {
7990     return mImpl->on_vkBindBufferMemory2KHR(pool, device, bindInfoCount, pBindInfos);
7991 }
7992 
on_vkCreateImage(android::base::BumpPool * pool,VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)7993 VkResult VkDecoderGlobalState::on_vkCreateImage(android::base::BumpPool* pool, VkDevice device,
7994                                                 const VkImageCreateInfo* pCreateInfo,
7995                                                 const VkAllocationCallbacks* pAllocator,
7996                                                 VkImage* pImage) {
7997     return mImpl->on_vkCreateImage(pool, device, pCreateInfo, pAllocator, pImage);
7998 }
7999 
on_vkDestroyImage(android::base::BumpPool * pool,VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)8000 void VkDecoderGlobalState::on_vkDestroyImage(android::base::BumpPool* pool, VkDevice device,
8001                                              VkImage image,
8002                                              const VkAllocationCallbacks* pAllocator) {
8003     mImpl->on_vkDestroyImage(pool, device, image, pAllocator);
8004 }
8005 
on_vkBindImageMemory(android::base::BumpPool * pool,VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)8006 VkResult VkDecoderGlobalState::on_vkBindImageMemory(android::base::BumpPool* pool, VkDevice device,
8007                                                     VkImage image, VkDeviceMemory memory,
8008                                                     VkDeviceSize memoryOffset) {
8009     return mImpl->on_vkBindImageMemory(pool, device, image, memory, memoryOffset);
8010 }
8011 
on_vkBindImageMemory2(android::base::BumpPool * pool,VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfo * pBindInfos)8012 VkResult VkDecoderGlobalState::on_vkBindImageMemory2(android::base::BumpPool* pool, VkDevice device,
8013                                                      uint32_t bindInfoCount,
8014                                                      const VkBindImageMemoryInfo* pBindInfos) {
8015     return mImpl->on_vkBindImageMemory2(pool, device, bindInfoCount, pBindInfos);
8016 }
8017 
on_vkBindImageMemory2KHR(android::base::BumpPool * pool,VkDevice device,uint32_t bindInfoCount,const VkBindImageMemoryInfo * pBindInfos)8018 VkResult VkDecoderGlobalState::on_vkBindImageMemory2KHR(android::base::BumpPool* pool,
8019                                                         VkDevice device, uint32_t bindInfoCount,
8020                                                         const VkBindImageMemoryInfo* pBindInfos) {
8021     return mImpl->on_vkBindImageMemory2(pool, device, bindInfoCount, pBindInfos);
8022 }
8023 
on_vkCreateImageView(android::base::BumpPool * pool,VkDevice device,const VkImageViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImageView * pView)8024 VkResult VkDecoderGlobalState::on_vkCreateImageView(android::base::BumpPool* pool, VkDevice device,
8025                                                     const VkImageViewCreateInfo* pCreateInfo,
8026                                                     const VkAllocationCallbacks* pAllocator,
8027                                                     VkImageView* pView) {
8028     return mImpl->on_vkCreateImageView(pool, device, pCreateInfo, pAllocator, pView);
8029 }
8030 
on_vkDestroyImageView(android::base::BumpPool * pool,VkDevice device,VkImageView imageView,const VkAllocationCallbacks * pAllocator)8031 void VkDecoderGlobalState::on_vkDestroyImageView(android::base::BumpPool* pool, VkDevice device,
8032                                                  VkImageView imageView,
8033                                                  const VkAllocationCallbacks* pAllocator) {
8034     mImpl->on_vkDestroyImageView(pool, device, imageView, pAllocator);
8035 }
8036 
on_vkCreateSampler(android::base::BumpPool * pool,VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler)8037 VkResult VkDecoderGlobalState::on_vkCreateSampler(android::base::BumpPool* pool, VkDevice device,
8038                                                   const VkSamplerCreateInfo* pCreateInfo,
8039                                                   const VkAllocationCallbacks* pAllocator,
8040                                                   VkSampler* pSampler) {
8041     return mImpl->on_vkCreateSampler(pool, device, pCreateInfo, pAllocator, pSampler);
8042 }
8043 
on_vkDestroySampler(android::base::BumpPool * pool,VkDevice device,VkSampler sampler,const VkAllocationCallbacks * pAllocator)8044 void VkDecoderGlobalState::on_vkDestroySampler(android::base::BumpPool* pool, VkDevice device,
8045                                                VkSampler sampler,
8046                                                const VkAllocationCallbacks* pAllocator) {
8047     mImpl->on_vkDestroySampler(pool, device, sampler, pAllocator);
8048 }
8049 
on_vkCreateSemaphore(android::base::BumpPool * pool,VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore)8050 VkResult VkDecoderGlobalState::on_vkCreateSemaphore(android::base::BumpPool* pool, VkDevice device,
8051                                                     const VkSemaphoreCreateInfo* pCreateInfo,
8052                                                     const VkAllocationCallbacks* pAllocator,
8053                                                     VkSemaphore* pSemaphore) {
8054     return mImpl->on_vkCreateSemaphore(pool, device, pCreateInfo, pAllocator, pSemaphore);
8055 }
8056 
on_vkImportSemaphoreFdKHR(android::base::BumpPool * pool,VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo)8057 VkResult VkDecoderGlobalState::on_vkImportSemaphoreFdKHR(
8058     android::base::BumpPool* pool, VkDevice device,
8059     const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
8060     return mImpl->on_vkImportSemaphoreFdKHR(pool, device, pImportSemaphoreFdInfo);
8061 }
8062 
on_vkGetSemaphoreFdKHR(android::base::BumpPool * pool,VkDevice device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd)8063 VkResult VkDecoderGlobalState::on_vkGetSemaphoreFdKHR(android::base::BumpPool* pool,
8064                                                       VkDevice device,
8065                                                       const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
8066                                                       int* pFd) {
8067     return mImpl->on_vkGetSemaphoreFdKHR(pool, device, pGetFdInfo, pFd);
8068 }
8069 
on_vkDestroySemaphore(android::base::BumpPool * pool,VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)8070 void VkDecoderGlobalState::on_vkDestroySemaphore(android::base::BumpPool* pool, VkDevice device,
8071                                                  VkSemaphore semaphore,
8072                                                  const VkAllocationCallbacks* pAllocator) {
8073     mImpl->on_vkDestroySemaphore(pool, device, semaphore, pAllocator);
8074 }
8075 
on_vkCreateFence(android::base::BumpPool * pool,VkDevice device,const VkFenceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFence * pFence)8076 VkResult VkDecoderGlobalState::on_vkCreateFence(android::base::BumpPool* pool, VkDevice device,
8077                                                 const VkFenceCreateInfo* pCreateInfo,
8078                                                 const VkAllocationCallbacks* pAllocator,
8079                                                 VkFence* pFence) {
8080     return mImpl->on_vkCreateFence(pool, device, pCreateInfo, pAllocator, pFence);
8081 }
8082 
8083 VkResult VkDecoderGlobalState::on_vkResetFences(android::base::BumpPool* pool, VkDevice device,
8084                                                 uint32_t fenceCount, const VkFence* pFences) {
8085     return mImpl->on_vkResetFences(pool, device, fenceCount, pFences);
8086 }
8087 
8088 void VkDecoderGlobalState::on_vkDestroyFence(android::base::BumpPool* pool, VkDevice device,
8089                                              VkFence fence,
8090                                              const VkAllocationCallbacks* pAllocator) {
8091     return mImpl->on_vkDestroyFence(pool, device, fence, pAllocator);
8092 }
8093 
8094 VkResult VkDecoderGlobalState::on_vkCreateDescriptorSetLayout(
8095     android::base::BumpPool* pool, VkDevice device,
8096     const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
8097     VkDescriptorSetLayout* pSetLayout) {
8098     return mImpl->on_vkCreateDescriptorSetLayout(pool, device, pCreateInfo, pAllocator, pSetLayout);
8099 }
8100 
8101 void VkDecoderGlobalState::on_vkDestroyDescriptorSetLayout(
8102     android::base::BumpPool* pool, VkDevice device, VkDescriptorSetLayout descriptorSetLayout,
8103     const VkAllocationCallbacks* pAllocator) {
8104     mImpl->on_vkDestroyDescriptorSetLayout(pool, device, descriptorSetLayout, pAllocator);
8105 }
8106 
8107 VkResult VkDecoderGlobalState::on_vkCreateDescriptorPool(
8108     android::base::BumpPool* pool, VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo,
8109     const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool) {
8110     return mImpl->on_vkCreateDescriptorPool(pool, device, pCreateInfo, pAllocator, pDescriptorPool);
8111 }
8112 
8113 void VkDecoderGlobalState::on_vkDestroyDescriptorPool(android::base::BumpPool* pool,
8114                                                       VkDevice device,
8115                                                       VkDescriptorPool descriptorPool,
8116                                                       const VkAllocationCallbacks* pAllocator) {
8117     mImpl->on_vkDestroyDescriptorPool(pool, device, descriptorPool, pAllocator);
8118 }
8119 
8120 VkResult VkDecoderGlobalState::on_vkResetDescriptorPool(android::base::BumpPool* pool,
8121                                                         VkDevice device,
8122                                                         VkDescriptorPool descriptorPool,
8123                                                         VkDescriptorPoolResetFlags flags) {
8124     return mImpl->on_vkResetDescriptorPool(pool, device, descriptorPool, flags);
8125 }
8126 
8127 VkResult VkDecoderGlobalState::on_vkAllocateDescriptorSets(
8128     android::base::BumpPool* pool, VkDevice device,
8129     const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets) {
8130     return mImpl->on_vkAllocateDescriptorSets(pool, device, pAllocateInfo, pDescriptorSets);
8131 }
8132 
8133 VkResult VkDecoderGlobalState::on_vkFreeDescriptorSets(android::base::BumpPool* pool,
8134                                                        VkDevice device,
8135                                                        VkDescriptorPool descriptorPool,
8136                                                        uint32_t descriptorSetCount,
8137                                                        const VkDescriptorSet* pDescriptorSets) {
8138     return mImpl->on_vkFreeDescriptorSets(pool, device, descriptorPool, descriptorSetCount,
8139                                           pDescriptorSets);
8140 }
8141 
8142 void VkDecoderGlobalState::on_vkUpdateDescriptorSets(android::base::BumpPool* pool, VkDevice device,
8143                                                      uint32_t descriptorWriteCount,
8144                                                      const VkWriteDescriptorSet* pDescriptorWrites,
8145                                                      uint32_t descriptorCopyCount,
8146                                                      const VkCopyDescriptorSet* pDescriptorCopies) {
8147     mImpl->on_vkUpdateDescriptorSets(pool, device, descriptorWriteCount, pDescriptorWrites,
8148                                      descriptorCopyCount, pDescriptorCopies);
8149 }
8150 
8151 VkResult VkDecoderGlobalState::on_vkCreateShaderModule(android::base::BumpPool* pool,
8152                                                        VkDevice boxed_device,
8153                                                        const VkShaderModuleCreateInfo* pCreateInfo,
8154                                                        const VkAllocationCallbacks* pAllocator,
8155                                                        VkShaderModule* pShaderModule) {
8156     return mImpl->on_vkCreateShaderModule(pool, boxed_device, pCreateInfo, pAllocator,
8157                                           pShaderModule);
8158 }
8159 
8160 void VkDecoderGlobalState::on_vkDestroyShaderModule(android::base::BumpPool* pool,
8161                                                     VkDevice boxed_device,
8162                                                     VkShaderModule shaderModule,
8163                                                     const VkAllocationCallbacks* pAllocator) {
8164     mImpl->on_vkDestroyShaderModule(pool, boxed_device, shaderModule, pAllocator);
8165 }
8166 
8167 VkResult VkDecoderGlobalState::on_vkCreatePipelineCache(
8168     android::base::BumpPool* pool, VkDevice boxed_device,
8169     const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
8170     VkPipelineCache* pPipelineCache) {
8171     return mImpl->on_vkCreatePipelineCache(pool, boxed_device, pCreateInfo, pAllocator,
8172                                            pPipelineCache);
8173 }
8174 
8175 void VkDecoderGlobalState::on_vkDestroyPipelineCache(android::base::BumpPool* pool,
8176                                                      VkDevice boxed_device,
8177                                                      VkPipelineCache pipelineCache,
8178                                                      const VkAllocationCallbacks* pAllocator) {
8179     mImpl->on_vkDestroyPipelineCache(pool, boxed_device, pipelineCache, pAllocator);
8180 }
8181 
8182 VkResult VkDecoderGlobalState::on_vkCreateGraphicsPipelines(
8183     android::base::BumpPool* pool, VkDevice boxed_device, VkPipelineCache pipelineCache,
8184     uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos,
8185     const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines) {
8186     return mImpl->on_vkCreateGraphicsPipelines(pool, boxed_device, pipelineCache, createInfoCount,
8187                                                pCreateInfos, pAllocator, pPipelines);
8188 }
8189 
8190 void VkDecoderGlobalState::on_vkDestroyPipeline(android::base::BumpPool* pool,
8191                                                 VkDevice boxed_device, VkPipeline pipeline,
8192                                                 const VkAllocationCallbacks* pAllocator) {
8193     mImpl->on_vkDestroyPipeline(pool, boxed_device, pipeline, pAllocator);
8194 }
8195 
8196 void VkDecoderGlobalState::on_vkCmdCopyBufferToImage(
8197     android::base::BumpPool* pool, VkCommandBuffer commandBuffer, VkBuffer srcBuffer,
8198     VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount,
8199     const VkBufferImageCopy* pRegions, const VkDecoderContext& context) {
8200     mImpl->on_vkCmdCopyBufferToImage(pool, commandBuffer, srcBuffer, dstImage, dstImageLayout,
8201                                      regionCount, pRegions, context);
8202 }
8203 
8204 void VkDecoderGlobalState::on_vkCmdCopyImage(android::base::BumpPool* pool,
8205                                              VkCommandBuffer commandBuffer, VkImage srcImage,
8206                                              VkImageLayout srcImageLayout, VkImage dstImage,
8207                                              VkImageLayout dstImageLayout, uint32_t regionCount,
8208                                              const VkImageCopy* pRegions) {
8209     mImpl->on_vkCmdCopyImage(pool, commandBuffer, srcImage, srcImageLayout, dstImage,
8210                              dstImageLayout, regionCount, pRegions);
8211 }
8212 void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer(android::base::BumpPool* pool,
8213                                                      VkCommandBuffer commandBuffer,
8214                                                      VkImage srcImage, VkImageLayout srcImageLayout,
8215                                                      VkBuffer dstBuffer, uint32_t regionCount,
8216                                                      const VkBufferImageCopy* pRegions) {
8217     mImpl->on_vkCmdCopyImageToBuffer(pool, commandBuffer, srcImage, srcImageLayout, dstBuffer,
8218                                      regionCount, pRegions);
8219 }
8220 
8221 void VkDecoderGlobalState::on_vkCmdCopyBufferToImage2(android::base::BumpPool* pool,
8222                                 VkCommandBuffer commandBuffer,
8223                                 const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo,
8224                                 const VkDecoderContext& context) {
8225     mImpl->on_vkCmdCopyBufferToImage2(pool, commandBuffer, pCopyBufferToImageInfo, context);
8226 }
8227 
8228 void VkDecoderGlobalState::on_vkCmdCopyImage2(android::base::BumpPool* pool,
8229     VkCommandBuffer commandBuffer,
8230     const VkCopyImageInfo2* pCopyImageInfo) {
8231     mImpl->on_vkCmdCopyImage2(pool, commandBuffer, pCopyImageInfo);
8232 }
8233 
8234 void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer2(android::base::BumpPool* pool,
8235                                 VkCommandBuffer commandBuffer,
8236                                 const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo) {
8237     mImpl->on_vkCmdCopyImageToBuffer2(pool, commandBuffer, pCopyImageToBufferInfo);
8238 }
8239 
8240 void VkDecoderGlobalState::on_vkCmdCopyBufferToImage2KHR(android::base::BumpPool* pool,
8241                                 VkCommandBuffer commandBuffer,
8242                                 const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo,
8243                                 const VkDecoderContext& context) {
8244     mImpl->on_vkCmdCopyBufferToImage2KHR(pool, commandBuffer, pCopyBufferToImageInfo, context);
8245 }
8246 
8247 void VkDecoderGlobalState::on_vkCmdCopyImage2KHR(android::base::BumpPool* pool,
8248     VkCommandBuffer commandBuffer,
8249     const VkCopyImageInfo2KHR* pCopyImageInfo) {
8250     mImpl->on_vkCmdCopyImage2KHR(pool, commandBuffer, pCopyImageInfo);
8251 }
8252 
8253 void VkDecoderGlobalState::on_vkCmdCopyImageToBuffer2KHR(android::base::BumpPool* pool,
8254                                 VkCommandBuffer commandBuffer,
8255                                 const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo) {
8256     mImpl->on_vkCmdCopyImageToBuffer2KHR(pool, commandBuffer, pCopyImageToBufferInfo);
8257 }
8258 
8259 void VkDecoderGlobalState::on_vkGetImageMemoryRequirements(
8260     android::base::BumpPool* pool, VkDevice device, VkImage image,
8261     VkMemoryRequirements* pMemoryRequirements) {
8262     mImpl->on_vkGetImageMemoryRequirements(pool, device, image, pMemoryRequirements);
8263 }
8264 
8265 void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2(
8266     android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
8267     VkMemoryRequirements2* pMemoryRequirements) {
8268     mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
8269 }
8270 
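// Note: the KHR-suffixed variants below forward to the same mImpl implementations as
// their core (non-KHR) counterparts, so the decoder treats the extension spelling and
// the promoted core spelling of these commands identically.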
8271 void VkDecoderGlobalState::on_vkGetImageMemoryRequirements2KHR(
8272     android::base::BumpPool* pool, VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo,
8273     VkMemoryRequirements2* pMemoryRequirements) {
8274     mImpl->on_vkGetImageMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
8275 }
8276 
8277 void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements(
8278     android::base::BumpPool* pool, VkDevice device, VkBuffer buffer,
8279     VkMemoryRequirements* pMemoryRequirements) {
8280     mImpl->on_vkGetBufferMemoryRequirements(pool, device, buffer, pMemoryRequirements);
8281 }
8282 
8283 void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements2(
8284     android::base::BumpPool* pool, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
8285     VkMemoryRequirements2* pMemoryRequirements) {
8286     mImpl->on_vkGetBufferMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
8287 }
8288 
8289 void VkDecoderGlobalState::on_vkGetBufferMemoryRequirements2KHR(
8290     android::base::BumpPool* pool, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
8291     VkMemoryRequirements2* pMemoryRequirements) {
8292     mImpl->on_vkGetBufferMemoryRequirements2(pool, device, pInfo, pMemoryRequirements);
8293 }
8294 
8295 void VkDecoderGlobalState::on_vkCmdPipelineBarrier(
8296     android::base::BumpPool* pool, VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask,
8297     VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags,
8298     uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers,
8299     uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers,
8300     uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
8301     mImpl->on_vkCmdPipelineBarrier(pool, commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
8302                                    memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
8303                                    pBufferMemoryBarriers, imageMemoryBarrierCount,
8304                                    pImageMemoryBarriers);
8305 }
8306 
8307 VkResult VkDecoderGlobalState::on_vkAllocateMemory(android::base::BumpPool* pool, VkDevice device,
8308                                                    const VkMemoryAllocateInfo* pAllocateInfo,
8309                                                    const VkAllocationCallbacks* pAllocator,
8310                                                    VkDeviceMemory* pMemory) {
8311     return mImpl->on_vkAllocateMemory(pool, device, pAllocateInfo, pAllocator, pMemory);
8312 }
8313 
8314 void VkDecoderGlobalState::on_vkFreeMemory(android::base::BumpPool* pool, VkDevice device,
8315                                            VkDeviceMemory memory,
8316                                            const VkAllocationCallbacks* pAllocator) {
8317     mImpl->on_vkFreeMemory(pool, device, memory, pAllocator);
8318 }
8319 
8320 VkResult VkDecoderGlobalState::on_vkMapMemory(android::base::BumpPool* pool, VkDevice device,
8321                                               VkDeviceMemory memory, VkDeviceSize offset,
8322                                               VkDeviceSize size, VkMemoryMapFlags flags,
8323                                               void** ppData) {
8324     return mImpl->on_vkMapMemory(pool, device, memory, offset, size, flags, ppData);
8325 }
8326 
8327 void VkDecoderGlobalState::on_vkUnmapMemory(android::base::BumpPool* pool, VkDevice device,
8328                                             VkDeviceMemory memory) {
8329     mImpl->on_vkUnmapMemory(pool, device, memory);
8330 }
8331 
8332 uint8_t* VkDecoderGlobalState::getMappedHostPointer(VkDeviceMemory memory) {
8333     return mImpl->getMappedHostPointer(memory);
8334 }
8335 
8336 VkDeviceSize VkDecoderGlobalState::getDeviceMemorySize(VkDeviceMemory memory) {
8337     return mImpl->getDeviceMemorySize(memory);
8338 }
8339 
8340 bool VkDecoderGlobalState::usingDirectMapping() const { return mImpl->usingDirectMapping(); }
8341 
8342 VkDecoderGlobalState::HostFeatureSupport VkDecoderGlobalState::getHostFeatureSupport() const {
8343     return mImpl->getHostFeatureSupport();
8344 }
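
// Illustrative sketch only (not part of this file): host-side code holding a
// VkDeviceMemory that the decoder already tracks could combine getMappedHostPointer()
// and getDeviceMemorySize() to copy out the mapped contents. This assumes the usual
// VkDecoderGlobalState::get() singleton accessor and assumes (not guaranteed here)
// that getMappedHostPointer() returns null for memory that is not host-mapped.
//
//   std::vector<uint8_t> copyMappedMemory(VkDeviceMemory memory) {
//       VkDecoderGlobalState* dgs = VkDecoderGlobalState::get();
//       uint8_t* hostPtr = dgs->getMappedHostPointer(memory);
//       VkDeviceSize size = dgs->getDeviceMemorySize(memory);
//       if (!hostPtr || size == 0) return {};
//       return std::vector<uint8_t>(hostPtr, hostPtr + size);
//   }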
8345 
8346 // VK_ANDROID_native_buffer
8347 VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsageANDROID(android::base::BumpPool* pool,
8348                                                                     VkDevice device,
8349                                                                     VkFormat format,
8350                                                                     VkImageUsageFlags imageUsage,
8351                                                                     int* grallocUsage) {
8352     return mImpl->on_vkGetSwapchainGrallocUsageANDROID(pool, device, format, imageUsage,
8353                                                        grallocUsage);
8354 }
8355 
8356 VkResult VkDecoderGlobalState::on_vkGetSwapchainGrallocUsage2ANDROID(
8357     android::base::BumpPool* pool, VkDevice device, VkFormat format, VkImageUsageFlags imageUsage,
8358     VkSwapchainImageUsageFlagsANDROID swapchainImageUsage, uint64_t* grallocConsumerUsage,
8359     uint64_t* grallocProducerUsage) {
8360     return mImpl->on_vkGetSwapchainGrallocUsage2ANDROID(pool, device, format, imageUsage,
8361                                                         swapchainImageUsage, grallocConsumerUsage,
8362                                                         grallocProducerUsage);
8363 }
8364 
8365 VkResult VkDecoderGlobalState::on_vkAcquireImageANDROID(android::base::BumpPool* pool,
8366                                                         VkDevice device, VkImage image,
8367                                                         int nativeFenceFd, VkSemaphore semaphore,
8368                                                         VkFence fence) {
8369     return mImpl->on_vkAcquireImageANDROID(pool, device, image, nativeFenceFd, semaphore, fence);
8370 }
8371 
8372 VkResult VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROID(
8373     android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
8374     const VkSemaphore* pWaitSemaphores, VkImage image, int* pNativeFenceFd) {
8375     return mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount,
8376                                                       pWaitSemaphores, image, pNativeFenceFd);
8377 }
8378 
8379 // VK_GOOGLE_gfxstream
8380 VkResult VkDecoderGlobalState::on_vkMapMemoryIntoAddressSpaceGOOGLE(android::base::BumpPool* pool,
8381                                                                     VkDevice device,
8382                                                                     VkDeviceMemory memory,
8383                                                                     uint64_t* pAddress) {
8384     return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE(pool, device, memory, pAddress);
8385 }
8386 
8387 VkResult VkDecoderGlobalState::on_vkGetMemoryHostAddressInfoGOOGLE(
8388     android::base::BumpPool* pool, VkDevice device, VkDeviceMemory memory, uint64_t* pAddress,
8389     uint64_t* pSize, uint64_t* pHostmemId) {
8390     return mImpl->on_vkGetMemoryHostAddressInfoGOOGLE(pool, device, memory, pAddress, pSize,
8391                                                       pHostmemId);
8392 }
8393 
8394 VkResult VkDecoderGlobalState::on_vkGetBlobGOOGLE(android::base::BumpPool* pool, VkDevice device,
8395                                                   VkDeviceMemory memory) {
8396     return mImpl->on_vkGetBlobGOOGLE(pool, device, memory);
8397 }
8398 
8399 VkResult VkDecoderGlobalState::on_vkFreeMemorySyncGOOGLE(android::base::BumpPool* pool,
8400                                                          VkDevice device, VkDeviceMemory memory,
8401                                                          const VkAllocationCallbacks* pAllocator) {
8402     return mImpl->on_vkFreeMemorySyncGOOGLE(pool, device, memory, pAllocator);
8403 }
8404 
8405 VkResult VkDecoderGlobalState::on_vkAllocateCommandBuffers(
8406     android::base::BumpPool* pool, VkDevice device,
8407     const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers) {
8408     return mImpl->on_vkAllocateCommandBuffers(pool, device, pAllocateInfo, pCommandBuffers);
8409 }
8410 
8411 VkResult VkDecoderGlobalState::on_vkCreateCommandPool(android::base::BumpPool* pool,
8412                                                       VkDevice device,
8413                                                       const VkCommandPoolCreateInfo* pCreateInfo,
8414                                                       const VkAllocationCallbacks* pAllocator,
8415                                                       VkCommandPool* pCommandPool) {
8416     return mImpl->on_vkCreateCommandPool(pool, device, pCreateInfo, pAllocator, pCommandPool);
8417 }
8418 
8419 void VkDecoderGlobalState::on_vkDestroyCommandPool(android::base::BumpPool* pool, VkDevice device,
8420                                                    VkCommandPool commandPool,
8421                                                    const VkAllocationCallbacks* pAllocator) {
8422     mImpl->on_vkDestroyCommandPool(pool, device, commandPool, pAllocator);
8423 }
8424 
8425 VkResult VkDecoderGlobalState::on_vkResetCommandPool(android::base::BumpPool* pool, VkDevice device,
8426                                                      VkCommandPool commandPool,
8427                                                      VkCommandPoolResetFlags flags) {
8428     return mImpl->on_vkResetCommandPool(pool, device, commandPool, flags);
8429 }
8430 
8431 void VkDecoderGlobalState::on_vkCmdExecuteCommands(android::base::BumpPool* pool,
8432                                                    VkCommandBuffer commandBuffer,
8433                                                    uint32_t commandBufferCount,
8434                                                    const VkCommandBuffer* pCommandBuffers) {
8435     return mImpl->on_vkCmdExecuteCommands(pool, commandBuffer, commandBufferCount, pCommandBuffers);
8436 }
8437 
8438 VkResult VkDecoderGlobalState::on_vkQueueSubmit(android::base::BumpPool* pool, VkQueue queue,
8439                                                 uint32_t submitCount, const VkSubmitInfo* pSubmits,
8440                                                 VkFence fence) {
8441     return mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
8442 }
8443 
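// Both the VkSubmitInfo and VkSubmitInfo2 entry points funnel into the same
// mImpl->on_vkQueueSubmit() call; presumably the implementation is overloaded (or
// templated) on the submit-info type so that vkQueueSubmit and vkQueueSubmit2 share
// one code path.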
8444 VkResult VkDecoderGlobalState::on_vkQueueSubmit2(android::base::BumpPool* pool, VkQueue queue,
8445                                                  uint32_t submitCount,
8446                                                  const VkSubmitInfo2* pSubmits, VkFence fence) {
8447     return mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
8448 }
8449 
8450 VkResult VkDecoderGlobalState::on_vkQueueWaitIdle(android::base::BumpPool* pool, VkQueue queue) {
8451     return mImpl->on_vkQueueWaitIdle(pool, queue);
8452 }
8453 
8454 VkResult VkDecoderGlobalState::on_vkResetCommandBuffer(android::base::BumpPool* pool,
8455                                                        VkCommandBuffer commandBuffer,
8456                                                        VkCommandBufferResetFlags flags) {
8457     return mImpl->on_vkResetCommandBuffer(pool, commandBuffer, flags);
8458 }
8459 
8460 void VkDecoderGlobalState::on_vkFreeCommandBuffers(android::base::BumpPool* pool, VkDevice device,
8461                                                    VkCommandPool commandPool,
8462                                                    uint32_t commandBufferCount,
8463                                                    const VkCommandBuffer* pCommandBuffers) {
8464     return mImpl->on_vkFreeCommandBuffers(pool, device, commandPool, commandBufferCount,
8465                                           pCommandBuffers);
8466 }
8467 
8468 void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphoreProperties(
8469     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
8470     const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
8471     VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
8472     return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
8473         pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
8474 }
8475 
8476 void VkDecoderGlobalState::on_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
8477     android::base::BumpPool* pool, VkPhysicalDevice physicalDevice,
8478     const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
8479     VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
8480     return mImpl->on_vkGetPhysicalDeviceExternalSemaphoreProperties(
8481         pool, physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
8482 }
8483 
8484 // Descriptor update templates
8485 VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplate(
8486     android::base::BumpPool* pool, VkDevice boxed_device,
8487     const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
8488     const VkAllocationCallbacks* pAllocator,
8489     VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
8490     return mImpl->on_vkCreateDescriptorUpdateTemplate(pool, boxed_device, pCreateInfo, pAllocator,
8491                                                       pDescriptorUpdateTemplate);
8492 }
8493 
8494 VkResult VkDecoderGlobalState::on_vkCreateDescriptorUpdateTemplateKHR(
8495     android::base::BumpPool* pool, VkDevice boxed_device,
8496     const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
8497     const VkAllocationCallbacks* pAllocator,
8498     VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
8499     return mImpl->on_vkCreateDescriptorUpdateTemplateKHR(pool, boxed_device, pCreateInfo,
8500                                                          pAllocator, pDescriptorUpdateTemplate);
8501 }
8502 
8503 void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplate(
8504     android::base::BumpPool* pool, VkDevice boxed_device,
8505     VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
8506     mImpl->on_vkDestroyDescriptorUpdateTemplate(pool, boxed_device, descriptorUpdateTemplate,
8507                                                 pAllocator);
8508 }
8509 
8510 void VkDecoderGlobalState::on_vkDestroyDescriptorUpdateTemplateKHR(
8511     android::base::BumpPool* pool, VkDevice boxed_device,
8512     VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
8513     mImpl->on_vkDestroyDescriptorUpdateTemplateKHR(pool, boxed_device, descriptorUpdateTemplate,
8514                                                    pAllocator);
8515 }
8516 
8517 void VkDecoderGlobalState::on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
8518     android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
8519     VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
8520     uint32_t bufferInfoCount, uint32_t bufferViewCount, const uint32_t* pImageInfoEntryIndices,
8521     const uint32_t* pBufferInfoEntryIndices, const uint32_t* pBufferViewEntryIndices,
8522     const VkDescriptorImageInfo* pImageInfos, const VkDescriptorBufferInfo* pBufferInfos,
8523     const VkBufferView* pBufferViews) {
8524     mImpl->on_vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
8525         pool, boxed_device, descriptorSet, descriptorUpdateTemplate, imageInfoCount,
8526         bufferInfoCount, bufferViewCount, pImageInfoEntryIndices, pBufferInfoEntryIndices,
8527         pBufferViewEntryIndices, pImageInfos, pBufferInfos, pBufferViews);
8528 }
8529 
8530 void VkDecoderGlobalState::on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
8531     android::base::BumpPool* pool, VkDevice boxed_device, VkDescriptorSet descriptorSet,
8532     VkDescriptorUpdateTemplate descriptorUpdateTemplate, uint32_t imageInfoCount,
8533     uint32_t bufferInfoCount, uint32_t bufferViewCount, uint32_t inlineUniformBlockCount,
8534     const uint32_t* pImageInfoEntryIndices, const uint32_t* pBufferInfoEntryIndices,
8535     const uint32_t* pBufferViewEntryIndices, const VkDescriptorImageInfo* pImageInfos,
8536     const VkDescriptorBufferInfo* pBufferInfos, const VkBufferView* pBufferViews,
8537     const uint8_t* pInlineUniformBlockData) {
8538     mImpl->on_vkUpdateDescriptorSetWithTemplateSized2GOOGLE(
8539         pool, boxed_device, descriptorSet, descriptorUpdateTemplate, imageInfoCount,
8540         bufferInfoCount, bufferViewCount, inlineUniformBlockCount, pImageInfoEntryIndices,
8541         pBufferInfoEntryIndices, pBufferViewEntryIndices, pImageInfos, pBufferInfos, pBufferViews,
8542         pInlineUniformBlockData);
8543 }
8544 
8545 VkResult VkDecoderGlobalState::on_vkBeginCommandBuffer(android::base::BumpPool* pool,
8546                                                        VkCommandBuffer commandBuffer,
8547                                                        const VkCommandBufferBeginInfo* pBeginInfo,
8548                                                        const VkDecoderContext& context) {
8549     return mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo, context);
8550 }
8551 
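// The *AsyncGOOGLE variants below reuse the synchronous mImpl entry points but return
// void: the guest does not wait for a reply to these commands (that is the point of
// the Async variants), so any VkResult from the underlying call is dropped here.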
8552 void VkDecoderGlobalState::on_vkBeginCommandBufferAsyncGOOGLE(
8553     android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
8554     const VkCommandBufferBeginInfo* pBeginInfo, const VkDecoderContext& context) {
8555     mImpl->on_vkBeginCommandBuffer(pool, commandBuffer, pBeginInfo, context);
8556 }
8557 
8558 VkResult VkDecoderGlobalState::on_vkEndCommandBuffer(android::base::BumpPool* pool,
8559                                                      VkCommandBuffer commandBuffer,
8560                                                      const VkDecoderContext& context) {
8561     return mImpl->on_vkEndCommandBuffer(pool, commandBuffer, context);
8562 }
8563 
8564 void VkDecoderGlobalState::on_vkEndCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
8565                                                             VkCommandBuffer commandBuffer,
8566                                                             const VkDecoderContext& context) {
8567     mImpl->on_vkEndCommandBufferAsyncGOOGLE(pool, commandBuffer, context);
8568 }
8569 
8570 void VkDecoderGlobalState::on_vkResetCommandBufferAsyncGOOGLE(android::base::BumpPool* pool,
8571                                                               VkCommandBuffer commandBuffer,
8572                                                               VkCommandBufferResetFlags flags) {
8573     mImpl->on_vkResetCommandBufferAsyncGOOGLE(pool, commandBuffer, flags);
8574 }
8575 
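// Host/guest synchronization hooks: this function and on_vkQueueHostSyncGOOGLE below
// translate the *HostSyncGOOGLE commands into the shared hostSyncCommandBuffer() /
// hostSyncQueue() helpers, passing through the guest-provided sequence number; the
// string literal is just a tag identifying the caller (presumably for tracing/logging).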
8576 void VkDecoderGlobalState::on_vkCommandBufferHostSyncGOOGLE(android::base::BumpPool* pool,
8577                                                             VkCommandBuffer commandBuffer,
8578                                                             uint32_t needHostSync,
8579                                                             uint32_t sequenceNumber) {
8580     mImpl->hostSyncCommandBuffer("hostSync", commandBuffer, needHostSync, sequenceNumber);
8581 }
8582 
8583 VkResult VkDecoderGlobalState::on_vkCreateImageWithRequirementsGOOGLE(
8584     android::base::BumpPool* pool, VkDevice device, const VkImageCreateInfo* pCreateInfo,
8585     const VkAllocationCallbacks* pAllocator, VkImage* pImage,
8586     VkMemoryRequirements* pMemoryRequirements) {
8587     return mImpl->on_vkCreateImageWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
8588                                                          pImage, pMemoryRequirements);
8589 }
8590 
8591 VkResult VkDecoderGlobalState::on_vkCreateBufferWithRequirementsGOOGLE(
8592     android::base::BumpPool* pool, VkDevice device, const VkBufferCreateInfo* pCreateInfo,
8593     const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer,
8594     VkMemoryRequirements* pMemoryRequirements) {
8595     return mImpl->on_vkCreateBufferWithRequirementsGOOGLE(pool, device, pCreateInfo, pAllocator,
8596                                                           pBuffer, pMemoryRequirements);
8597 }
8598 
8599 void VkDecoderGlobalState::on_vkCmdBindPipeline(android::base::BumpPool* pool,
8600                                                 VkCommandBuffer commandBuffer,
8601                                                 VkPipelineBindPoint pipelineBindPoint,
8602                                                 VkPipeline pipeline) {
8603     mImpl->on_vkCmdBindPipeline(pool, commandBuffer, pipelineBindPoint, pipeline);
8604 }
8605 
8606 void VkDecoderGlobalState::on_vkCmdBindDescriptorSets(
8607     android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
8608     VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet,
8609     uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets,
8610     uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
8611     mImpl->on_vkCmdBindDescriptorSets(pool, commandBuffer, pipelineBindPoint, layout, firstSet,
8612                                       descriptorSetCount, pDescriptorSets, dynamicOffsetCount,
8613                                       pDynamicOffsets);
8614 }
8615 
8616 VkResult VkDecoderGlobalState::on_vkCreateRenderPass(android::base::BumpPool* pool,
8617                                                      VkDevice boxed_device,
8618                                                      const VkRenderPassCreateInfo* pCreateInfo,
8619                                                      const VkAllocationCallbacks* pAllocator,
8620                                                      VkRenderPass* pRenderPass) {
8621     return mImpl->on_vkCreateRenderPass(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
8622 }
8623 
8624 VkResult VkDecoderGlobalState::on_vkCreateRenderPass2(android::base::BumpPool* pool,
8625                                                       VkDevice boxed_device,
8626                                                       const VkRenderPassCreateInfo2* pCreateInfo,
8627                                                       const VkAllocationCallbacks* pAllocator,
8628                                                       VkRenderPass* pRenderPass) {
8629     return mImpl->on_vkCreateRenderPass2(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
8630 }
8631 
8632 VkResult VkDecoderGlobalState::on_vkCreateRenderPass2KHR(
8633     android::base::BumpPool* pool, VkDevice boxed_device,
8634     const VkRenderPassCreateInfo2KHR* pCreateInfo, const VkAllocationCallbacks* pAllocator,
8635     VkRenderPass* pRenderPass) {
8636     return mImpl->on_vkCreateRenderPass2(pool, boxed_device, pCreateInfo, pAllocator, pRenderPass);
8637 }
8638 
8639 void VkDecoderGlobalState::on_vkDestroyRenderPass(android::base::BumpPool* pool,
8640                                                   VkDevice boxed_device, VkRenderPass renderPass,
8641                                                   const VkAllocationCallbacks* pAllocator) {
8642     mImpl->on_vkDestroyRenderPass(pool, boxed_device, renderPass, pAllocator);
8643 }
8644 
8645 void VkDecoderGlobalState::on_vkCmdBeginRenderPass(android::base::BumpPool* pool,
8646                                                    VkCommandBuffer commandBuffer,
8647                                                    const VkRenderPassBeginInfo* pRenderPassBegin,
8648                                                    VkSubpassContents contents) {
8649     return mImpl->on_vkCmdBeginRenderPass(pool, commandBuffer, pRenderPassBegin, contents);
8650 }
8651 
8652 void VkDecoderGlobalState::on_vkCmdBeginRenderPass2(android::base::BumpPool* pool,
8653                                                     VkCommandBuffer commandBuffer,
8654                                                     const VkRenderPassBeginInfo* pRenderPassBegin,
8655                                                     const VkSubpassBeginInfo* pSubpassBeginInfo) {
8656     return mImpl->on_vkCmdBeginRenderPass2(pool, commandBuffer, pRenderPassBegin,
8657                                            pSubpassBeginInfo);
8658 }
8659 
8660 void VkDecoderGlobalState::on_vkCmdBeginRenderPass2KHR(
8661     android::base::BumpPool* pool, VkCommandBuffer commandBuffer,
8662     const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo) {
8663     return mImpl->on_vkCmdBeginRenderPass2(pool, commandBuffer, pRenderPassBegin,
8664                                            pSubpassBeginInfo);
8665 }
8666 
8667 VkResult VkDecoderGlobalState::on_vkCreateFramebuffer(android::base::BumpPool* pool,
8668                                                       VkDevice boxed_device,
8669                                                       const VkFramebufferCreateInfo* pCreateInfo,
8670                                                       const VkAllocationCallbacks* pAllocator,
8671                                                       VkFramebuffer* pFramebuffer) {
8672     return mImpl->on_vkCreateFramebuffer(pool, boxed_device, pCreateInfo, pAllocator, pFramebuffer);
8673 }
8674 
8675 void VkDecoderGlobalState::on_vkDestroyFramebuffer(android::base::BumpPool* pool,
8676                                                    VkDevice boxed_device, VkFramebuffer framebuffer,
8677                                                    const VkAllocationCallbacks* pAllocator) {
8678     mImpl->on_vkDestroyFramebuffer(pool, boxed_device, framebuffer, pAllocator);
8679 }
8680 
8681 void VkDecoderGlobalState::on_vkQueueHostSyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
8682                                                     uint32_t needHostSync,
8683                                                     uint32_t sequenceNumber) {
8684     mImpl->hostSyncQueue("hostSyncQueue", queue, needHostSync, sequenceNumber);
8685 }
8686 
8687 void VkDecoderGlobalState::on_vkCmdCopyQueryPoolResults(android::base::BumpPool* pool,
8688                                                         VkCommandBuffer commandBuffer,
8689                                                         VkQueryPool queryPool, uint32_t firstQuery,
8690                                                         uint32_t queryCount, VkBuffer dstBuffer,
8691                                                         VkDeviceSize dstOffset, VkDeviceSize stride,
8692                                                         VkQueryResultFlags flags) {
8693     mImpl->on_vkCmdCopyQueryPoolResults(pool, commandBuffer, queryPool, firstQuery, queryCount,
8694                                         dstBuffer, dstOffset, stride, flags);
8695 }
8696 
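// As with the command-buffer Async hooks above, these queue-level Async variants
// forward to the synchronous mImpl implementations and discard the VkResult.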
8697 void VkDecoderGlobalState::on_vkQueueSubmitAsyncGOOGLE(android::base::BumpPool* pool, VkQueue queue,
8698                                                        uint32_t submitCount,
8699                                                        const VkSubmitInfo* pSubmits,
8700                                                        VkFence fence) {
8701     mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
8702 }
8703 
8704 void VkDecoderGlobalState::on_vkQueueSubmitAsync2GOOGLE(android::base::BumpPool* pool,
8705                                                         VkQueue queue, uint32_t submitCount,
8706                                                         const VkSubmitInfo2* pSubmits,
8707                                                         VkFence fence) {
8708     mImpl->on_vkQueueSubmit(pool, queue, submitCount, pSubmits, fence);
8709 }
8710 
8711 void VkDecoderGlobalState::on_vkQueueWaitIdleAsyncGOOGLE(android::base::BumpPool* pool,
8712                                                          VkQueue queue) {
8713     mImpl->on_vkQueueWaitIdle(pool, queue);
8714 }
8715 
8716 void VkDecoderGlobalState::on_vkQueueBindSparseAsyncGOOGLE(android::base::BumpPool* pool,
8717                                                            VkQueue queue, uint32_t bindInfoCount,
8718                                                            const VkBindSparseInfo* pBindInfo,
8719                                                            VkFence fence) {
8720     mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
8721 }
8722 
8723 void VkDecoderGlobalState::on_vkGetLinearImageLayoutGOOGLE(android::base::BumpPool* pool,
8724                                                            VkDevice device, VkFormat format,
8725                                                            VkDeviceSize* pOffset,
8726                                                            VkDeviceSize* pRowPitchAlignment) {
8727     mImpl->on_vkGetLinearImageLayoutGOOGLE(pool, device, format, pOffset, pRowPitchAlignment);
8728 }
8729 
8730 void VkDecoderGlobalState::on_vkGetLinearImageLayout2GOOGLE(android::base::BumpPool* pool,
8731                                                             VkDevice device,
8732                                                             const VkImageCreateInfo* pCreateInfo,
8733                                                             VkDeviceSize* pOffset,
8734                                                             VkDeviceSize* pRowPitchAlignment) {
8735     mImpl->on_vkGetLinearImageLayout2GOOGLE(pool, device, pCreateInfo, pOffset, pRowPitchAlignment);
8736 }
8737 
on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool * pool,VkQueue queue,VkCommandBuffer commandBuffer,VkDeviceSize dataSize,const void * pData,const VkDecoderContext & context)8738 void VkDecoderGlobalState::on_vkQueueFlushCommandsGOOGLE(android::base::BumpPool* pool,
8739                                                          VkQueue queue,
8740                                                          VkCommandBuffer commandBuffer,
8741                                                          VkDeviceSize dataSize, const void* pData,
8742                                                          const VkDecoderContext& context) {
8743     mImpl->on_vkQueueFlushCommandsGOOGLE(pool, queue, commandBuffer, dataSize, pData, context);
8744 }
8745 
void VkDecoderGlobalState::on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(
    android::base::BumpPool* pool, VkQueue queue, VkCommandBuffer commandBuffer,
    VkDeviceMemory deviceMemory, VkDeviceSize dataOffset, VkDeviceSize dataSize,
    const VkDecoderContext& context) {
    mImpl->on_vkQueueFlushCommandsFromAuxMemoryGOOGLE(pool, queue, commandBuffer, deviceMemory,
                                                      dataOffset, dataSize, context);
}

void VkDecoderGlobalState::on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
    android::base::BumpPool* pool, VkQueue queue, uint32_t descriptorPoolCount,
    const VkDescriptorPool* pDescriptorPools, uint32_t descriptorSetCount,
    const VkDescriptorSetLayout* pDescriptorSetLayouts, const uint64_t* pDescriptorSetPoolIds,
    const uint32_t* pDescriptorSetWhichPool, const uint32_t* pDescriptorSetPendingAllocation,
    const uint32_t* pDescriptorWriteStartingIndices, uint32_t pendingDescriptorWriteCount,
    const VkWriteDescriptorSet* pPendingDescriptorWrites) {
    mImpl->on_vkQueueCommitDescriptorSetUpdatesGOOGLE(
        pool, queue, descriptorPoolCount, pDescriptorPools, descriptorSetCount,
        pDescriptorSetLayouts, pDescriptorSetPoolIds, pDescriptorSetWhichPool,
        pDescriptorSetPendingAllocation, pDescriptorWriteStartingIndices,
        pendingDescriptorWriteCount, pPendingDescriptorWrites);
}

void VkDecoderGlobalState::on_vkCollectDescriptorPoolIdsGOOGLE(android::base::BumpPool* pool,
                                                               VkDevice device,
                                                               VkDescriptorPool descriptorPool,
                                                               uint32_t* pPoolIdCount,
                                                               uint64_t* pPoolIds) {
    mImpl->on_vkCollectDescriptorPoolIdsGOOGLE(pool, device, descriptorPool, pPoolIdCount,
                                               pPoolIds);
}

VkResult VkDecoderGlobalState::on_vkQueueBindSparse(android::base::BumpPool* pool, VkQueue queue,
                                                    uint32_t bindInfoCount,
                                                    const VkBindSparseInfo* pBindInfo,
                                                    VkFence fence) {
    return mImpl->on_vkQueueBindSparse(pool, queue, bindInfoCount, pBindInfo, fence);
}

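// Async variant of vkQueueSignalReleaseImageANDROID: the fence fd produced by the
// implementation is received into a local and not returned to the caller.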
void VkDecoderGlobalState::on_vkQueueSignalReleaseImageANDROIDAsyncGOOGLE(
    android::base::BumpPool* pool, VkQueue queue, uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores, VkImage image) {
    int fenceFd;
    mImpl->on_vkQueueSignalReleaseImageANDROID(pool, queue, waitSemaphoreCount, pWaitSemaphores,
                                               image, &fenceFd);
}

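// Sampler Y'CbCr conversion: the KHR entry points below alias the core implementation.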
VkResult VkDecoderGlobalState::on_vkCreateSamplerYcbcrConversion(
    android::base::BumpPool* pool, VkDevice device,
    const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkSamplerYcbcrConversion* pYcbcrConversion) {
    return mImpl->on_vkCreateSamplerYcbcrConversion(pool, device, pCreateInfo, pAllocator,
                                                    pYcbcrConversion);
}

VkResult VkDecoderGlobalState::on_vkCreateSamplerYcbcrConversionKHR(
    android::base::BumpPool* pool, VkDevice device,
    const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator,
    VkSamplerYcbcrConversion* pYcbcrConversion) {
    return mImpl->on_vkCreateSamplerYcbcrConversion(pool, device, pCreateInfo, pAllocator,
                                                    pYcbcrConversion);
}

void VkDecoderGlobalState::on_vkDestroySamplerYcbcrConversion(
    android::base::BumpPool* pool, VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySamplerYcbcrConversion(pool, device, ycbcrConversion, pAllocator);
}

void VkDecoderGlobalState::on_vkDestroySamplerYcbcrConversionKHR(
    android::base::BumpPool* pool, VkDevice device, VkSamplerYcbcrConversion ycbcrConversion,
    const VkAllocationCallbacks* pAllocator) {
    mImpl->on_vkDestroySamplerYcbcrConversion(pool, device, ycbcrConversion, pAllocator);
}

void VkDecoderGlobalState::on_DeviceLost() { mImpl->on_DeviceLost(); }

void VkDecoderGlobalState::DeviceLostHandler() { mImpl->DeviceLostHandler(); }

void VkDecoderGlobalState::on_CheckOutOfMemory(VkResult result, uint32_t opCode,
                                               const VkDecoderContext& context,
                                               std::optional<uint64_t> allocationSize) {
    mImpl->on_CheckOutOfMemory(result, opCode, context, allocationSize);
}

VkResult VkDecoderGlobalState::waitForFence(VkFence boxed_fence, uint64_t timeout) {
    return mImpl->waitForFence(boxed_fence, timeout);
}

VkResult VkDecoderGlobalState::getFenceStatus(VkFence boxed_fence) {
    return mImpl->getFenceStatus(boxed_fence);
}

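// Forwards QSRI callback registration for the given image to the implementation
// (QSRI presumably abbreviates "queue signal release image", per VkQsriTimeline).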
AsyncResult VkDecoderGlobalState::registerQsriCallback(VkImage image,
                                                       VkQsriTimeline::Callback callback) {
    return mImpl->registerQsriCallback(image, std::move(callback));
}

void VkDecoderGlobalState::deviceMemoryTransform_tohost(VkDeviceMemory* memory,
                                                        uint32_t memoryCount, VkDeviceSize* offset,
                                                        uint32_t offsetCount, VkDeviceSize* size,
                                                        uint32_t sizeCount, uint32_t* typeIndex,
                                                        uint32_t typeIndexCount, uint32_t* typeBits,
                                                        uint32_t typeBitsCount) {
    // Not used currently
    (void)memory;
    (void)memoryCount;
    (void)offset;
    (void)offsetCount;
    (void)size;
    (void)sizeCount;
    (void)typeIndex;
    (void)typeIndexCount;
    (void)typeBits;
    (void)typeBitsCount;
}

void VkDecoderGlobalState::deviceMemoryTransform_fromhost(
    VkDeviceMemory* memory, uint32_t memoryCount, VkDeviceSize* offset, uint32_t offsetCount,
    VkDeviceSize* size, uint32_t sizeCount, uint32_t* typeIndex, uint32_t typeIndexCount,
    uint32_t* typeBits, uint32_t typeBitsCount) {
    // Not used currently
    (void)memory;
    (void)memoryCount;
    (void)offset;
    (void)offsetCount;
    (void)size;
    (void)sizeCount;
    (void)typeIndex;
    (void)typeIndexCount;
    (void)typeBits;
    (void)typeBitsCount;
}

VkDecoderSnapshot* VkDecoderGlobalState::snapshot() { return mImpl->snapshot(); }

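// Generates the transformImpl_<type>_tohost/_fromhost member definitions for every type
// in LIST_TRANSFORMED_TYPES; each one simply forwards to mImpl.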
#define DEFINE_TRANSFORMED_TYPE_IMPL(type)                                                        \
    void VkDecoderGlobalState::transformImpl_##type##_tohost(const type* val, uint32_t count) {   \
        mImpl->transformImpl_##type##_tohost(val, count);                                         \
    }                                                                                             \
    void VkDecoderGlobalState::transformImpl_##type##_fromhost(const type* val, uint32_t count) { \
        mImpl->transformImpl_##type##_fromhost(val, count);                                       \
    }

LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_IMPL)

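// These macros emit the per-handle-type boxing API on VkDecoderGlobalState
// (new_boxed_*, delete_*, unbox_*, unboxed_to_boxed_*, and dispatch_* for dispatchable
// handles), all forwarding to mImpl.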
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF(type)                                         \
    type VkDecoderGlobalState::new_boxed_##type(type underlying, VulkanDispatch* dispatch,     \
                                                bool ownDispatch) {                            \
        return mImpl->new_boxed_##type(underlying, dispatch, ownDispatch);                     \
    }                                                                                          \
    void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); }      \
    type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
    type VkDecoderGlobalState::unboxed_to_boxed_##type(type unboxed) {                         \
        return mImpl->unboxed_to_boxed_##type(unboxed);                                        \
    }                                                                                          \
    VulkanDispatch* VkDecoderGlobalState::dispatch_##type(type boxed) {                        \
        return mImpl->dispatch_##type(boxed);                                                  \
    }

#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF(type)                                     \
    type VkDecoderGlobalState::new_boxed_non_dispatchable_##type(type underlying) {            \
        return mImpl->new_boxed_non_dispatchable_##type(underlying);                           \
    }                                                                                          \
    void VkDecoderGlobalState::delete_##type(type boxed) { mImpl->delete_##type(boxed); }      \
    type VkDecoderGlobalState::unbox_##type(type boxed) { return mImpl->unbox_##type(boxed); } \
    type VkDecoderGlobalState::unboxed_to_boxed_non_dispatchable_##type(type unboxed) {        \
        return mImpl->unboxed_to_boxed_non_dispatchable_##type(unboxed);                       \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_API_DEF)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_API_DEF)

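// Free-function variants of the boxing API. Unlike the member functions above, these
// consult the global sBoxedHandleManager directly instead of going through mImpl.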
#define DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type)                                     \
    type unbox_##type(type boxed) {                                                               \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return VK_NULL_HANDLE;                                                          \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    VulkanDispatch* dispatch_##type(type boxed) {                                                 \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            fprintf(stderr, "%s: err not found boxed %p\n", __func__, boxed);                     \
            return nullptr;                                                                       \
        }                                                                                         \
        return elt->dispatch;                                                                     \
    }                                                                                             \
    void delete_##type(type boxed) {                                                              \
        if (!boxed) return;                                                                       \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) return;                                                                         \
        releaseOrderMaintInfo(elt->ordMaintInfo);                                                 \
        if (elt->readStream) {                                                                    \
            sReadStreamRegistry.push(elt->readStream);                                            \
            elt->readStream = nullptr;                                                            \
        }                                                                                         \
        sBoxedHandleManager.remove((uint64_t)boxed);                                              \
    }                                                                                             \
    type unboxed_to_boxed_##type(type unboxed) {                                                  \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
    }

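// Non-dispatchable variant: adds delayed_delete_* (deferred removal tied to a VkDevice)
// and aborts if asked to unbox a non-null handle that is not in the manager.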
#define DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF(type)                                 \
    type new_boxed_non_dispatchable_##type(type underlying) {                                     \
        return VkDecoderGlobalState::get()->new_boxed_non_dispatchable_##type(underlying);        \
    }                                                                                             \
    void delete_##type(type boxed) {                                                              \
        if (!boxed) return;                                                                       \
        sBoxedHandleManager.remove((uint64_t)boxed);                                              \
    }                                                                                             \
    void delayed_delete_##type(type boxed, VkDevice device, std::function<void()> callback) {     \
        sBoxedHandleManager.removeDelayed((uint64_t)boxed, device, callback);                     \
    }                                                                                             \
    type unbox_##type(type boxed) {                                                               \
        if (!boxed) return boxed;                                                                 \
        auto elt = sBoxedHandleManager.get((uint64_t)(uintptr_t)boxed);                           \
        if (!elt) {                                                                               \
            GFXSTREAM_ABORT(FatalError(ABORT_REASON_OTHER))                                       \
                << "Unbox " << boxed << " failed, not found.";                                    \
            return VK_NULL_HANDLE;                                                                \
        }                                                                                         \
        return (type)elt->underlying;                                                             \
    }                                                                                             \
    type unboxed_to_boxed_non_dispatchable_##type(type unboxed) {                                 \
        AutoLock lock(sBoxedHandleManager.lock);                                                  \
        return (type)sBoxedHandleManager.getBoxedFromUnboxedLocked((uint64_t)(uintptr_t)unboxed); \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(DEFINE_BOXED_NON_DISPATCHABLE_HANDLE_GLOBAL_API_DEF)

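// BoxedHandleUnwrapAndDeletePreserveBoxedMapping unwraps boxed handles in place while
// copying the original boxed values into a buffer allocated from the bump pool, so the
// boxed-to-unboxed mapping is preserved for the caller.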
void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::setup(android::base::BumpPool* pool,
                                                           uint64_t** bufPtr) {
    mPool = pool;
    mPreserveBufPtr = bufPtr;
}

void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::allocPreserve(size_t count) {
    *mPreserveBufPtr = (uint64_t*)mPool->alloc(count * sizeof(uint64_t));
}

#define BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name)        \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(          \
        type_name* handles, size_t count) {                                               \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]);                               \
            if (handles[i]) {                                                             \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]);  \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            };                                                                            \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(    \
        const type_name* handles, uint64_t* handle_u64s, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handles[i]) {                                                             \
                handle_u64s[i] =                                                          \
                    (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
            } else {                                                                      \
                handle_u64s[i] = 0;                                                       \
            }                                                                             \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(      \
        const uint64_t* handle_u64s, type_name* handles, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handle_u64s[i]) {                                                         \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(              \
                    (type_name)(uintptr_t)handle_u64s[i]);                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            }                                                                             \
        }                                                                                 \
    }

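// Non-dispatchable handles additionally have their boxed wrapper deleted once the
// unwrapped value has been recorded.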
#define BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL(type_name)    \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name(          \
        type_name* handles, size_t count) {                                               \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handles[i]);                               \
            if (handles[i]) {                                                             \
                auto boxed = handles[i];                                                  \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(handles[i]);  \
                delete_##type_name(boxed);                                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            };                                                                            \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_##type_name##_u64(    \
        const type_name* handles, uint64_t* handle_u64s, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handles[i]) {                                                             \
                auto boxed = handles[i];                                                  \
                handle_u64s[i] =                                                          \
                    (uint64_t)VkDecoderGlobalState::get()->unbox_##type_name(handles[i]); \
                delete_##type_name(boxed);                                                \
            } else {                                                                      \
                handle_u64s[i] = 0;                                                       \
            }                                                                             \
        }                                                                                 \
    }                                                                                     \
    void BoxedHandleUnwrapAndDeletePreserveBoxedMapping::mapHandles_u64_##type_name(      \
        const uint64_t* handle_u64s, type_name* handles, size_t count) {                  \
        allocPreserve(count);                                                             \
        for (size_t i = 0; i < count; ++i) {                                              \
            (*mPreserveBufPtr)[i] = (uint64_t)(handle_u64s[i]);                           \
            if (handle_u64s[i]) {                                                         \
                auto boxed = (type_name)(uintptr_t)handle_u64s[i];                        \
                handles[i] = VkDecoderGlobalState::get()->unbox_##type_name(              \
                    (type_name)(uintptr_t)handle_u64s[i]);                                \
                delete_##type_name(boxed);                                                \
            } else {                                                                      \
                handles[i] = (type_name) nullptr;                                         \
            }                                                                             \
        }                                                                                 \
    }

GOLDFISH_VK_LIST_DISPATCHABLE_HANDLE_TYPES(
    BOXED_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)
GOLDFISH_VK_LIST_NON_DISPATCHABLE_HANDLE_TYPES(
    BOXED_NON_DISPATCHABLE_HANDLE_UNWRAP_AND_DELETE_PRESERVE_BOXED_IMPL)

}  // namespace vk
}  // namespace gfxstream