/*
 * Copyright 2016 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "VkTestContext.h"

#ifdef SK_VULKAN

#include "GrContext.h"
#include "VkTestUtils.h"
#include "vk/GrVkExtensions.h"

namespace {

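// Loads vk<name> through getProc (as a device-level lookup when 'device' is
// non-null) into the corresponding f<name> member and asserts it was found.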
#define ACQUIRE_VK_PROC(name, device)                                               \
    f##name = reinterpret_cast<PFN_vk##name>(getProc("vk" #name, nullptr, device)); \
    SkASSERT(f##name)

/**
 * Implements sk_gpu_test::FenceSync for Vulkan. It records a single command buffer
 * with USAGE_SIMULTANEOUS_USE and no commands. On every insertFence request it
 * submits that command buffer to the queue with a new fence.
 */
class VkFenceSync : public sk_gpu_test::FenceSync {
public:
    VkFenceSync(GrVkGetProc getProc, VkDevice device, VkQueue queue,
                uint32_t queueFamilyIndex)
            : fDevice(device)
            , fQueue(queue) {
        ACQUIRE_VK_PROC(CreateCommandPool, device);
        ACQUIRE_VK_PROC(DestroyCommandPool, device);
        ACQUIRE_VK_PROC(AllocateCommandBuffers, device);
        ACQUIRE_VK_PROC(FreeCommandBuffers, device);
        ACQUIRE_VK_PROC(BeginCommandBuffer, device);
        ACQUIRE_VK_PROC(EndCommandBuffer, device);
        ACQUIRE_VK_PROC(CreateFence, device);
        ACQUIRE_VK_PROC(DestroyFence, device);
        ACQUIRE_VK_PROC(WaitForFences, device);
        ACQUIRE_VK_PROC(QueueSubmit, device);

        VkResult result;
        SkDEBUGCODE(fUnfinishedSyncs = 0;)
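        // Create a command pool on the supplied queue family; the single command
        // buffer allocated from it below is reused for every insertFence call.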
        VkCommandPoolCreateInfo createInfo;
        createInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
        createInfo.pNext = nullptr;
        createInfo.flags = 0;
        createInfo.queueFamilyIndex = queueFamilyIndex;
        result = fCreateCommandPool(fDevice, &createInfo, nullptr, &fCommandPool);
        SkASSERT(VK_SUCCESS == result);

        VkCommandBufferAllocateInfo allocateInfo;
        allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        allocateInfo.pNext = nullptr;
        allocateInfo.commandBufferCount = 1;
        allocateInfo.commandPool = fCommandPool;
        allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        result = fAllocateCommandBuffers(fDevice, &allocateInfo, &fCommandBuffer);
        SkASSERT(VK_SUCCESS == result);

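        // Record the command buffer once with no commands. SIMULTANEOUS_USE allows
        // it to be resubmitted while an earlier submission may still be pending.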
        VkCommandBufferBeginInfo beginInfo;
        beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        beginInfo.pNext = nullptr;
        beginInfo.flags = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT;
        beginInfo.pInheritanceInfo = nullptr;
        result = fBeginCommandBuffer(fCommandBuffer, &beginInfo);
        SkASSERT(VK_SUCCESS == result);
        result = fEndCommandBuffer(fCommandBuffer);
        SkASSERT(VK_SUCCESS == result);
    }

    ~VkFenceSync() override {
        SkASSERT(!fUnfinishedSyncs);
        // If the above assertion is true then the command buffer should not be in flight.
        fFreeCommandBuffers(fDevice, fCommandPool, 1, &fCommandBuffer);
        fDestroyCommandPool(fDevice, fCommandPool, nullptr);
    }

    sk_gpu_test::PlatformFence SK_WARN_UNUSED_RESULT insertFence() const override {
        VkResult result;

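        // Each insertFence call hands the submission a freshly created, unsignaled fence.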
        VkFence fence;
        VkFenceCreateInfo info;
        info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
        info.pNext = nullptr;
        info.flags = 0;
        result = fCreateFence(fDevice, &info, nullptr, &fence);
        SkASSERT(VK_SUCCESS == result);

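        // Submit the shared empty command buffer; the fence signals once the GPU has
        // executed this submission.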
        VkSubmitInfo submitInfo;
        submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.pNext = nullptr;
        submitInfo.waitSemaphoreCount = 0;
        submitInfo.pWaitSemaphores = nullptr;
        submitInfo.pWaitDstStageMask = nullptr;
        submitInfo.commandBufferCount = 1;
        submitInfo.pCommandBuffers = &fCommandBuffer;
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores = nullptr;
        result = fQueueSubmit(fQueue, 1, &submitInfo, fence);
        SkASSERT(VK_SUCCESS == result);

        SkDEBUGCODE(++fUnfinishedSyncs;)
        return (sk_gpu_test::PlatformFence)fence;
    }

    bool waitFence(sk_gpu_test::PlatformFence opaqueFence) const override {
        VkFence fence = (VkFence)opaqueFence;
        static constexpr uint64_t kForever = ~((uint64_t)0);
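        // Wait with an effectively unlimited timeout; only report failure on timeout.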
        auto result = fWaitForFences(fDevice, 1, &fence, true, kForever);
        return result != VK_TIMEOUT;
    }

    void deleteFence(sk_gpu_test::PlatformFence opaqueFence) const override {
        VkFence fence = (VkFence)opaqueFence;
        fDestroyFence(fDevice, fence, nullptr);
        SkDEBUGCODE(--fUnfinishedSyncs;)
    }

private:
    VkDevice fDevice;
    VkQueue fQueue;
    VkCommandPool fCommandPool;
    VkCommandBuffer fCommandBuffer;

    PFN_vkCreateCommandPool fCreateCommandPool = nullptr;
    PFN_vkDestroyCommandPool fDestroyCommandPool = nullptr;
    PFN_vkAllocateCommandBuffers fAllocateCommandBuffers = nullptr;
    PFN_vkFreeCommandBuffers fFreeCommandBuffers = nullptr;
    PFN_vkBeginCommandBuffer fBeginCommandBuffer = nullptr;
    PFN_vkEndCommandBuffer fEndCommandBuffer = nullptr;
    PFN_vkCreateFence fCreateFence = nullptr;
    PFN_vkDestroyFence fDestroyFence = nullptr;
    PFN_vkWaitForFences fWaitForFences = nullptr;
    PFN_vkQueueSubmit fQueueSubmit = nullptr;

    SkDEBUGCODE(mutable int fUnfinishedSyncs;)
    typedef sk_gpu_test::FenceSync INHERITED;
};

GR_STATIC_ASSERT(sizeof(VkFence) <= sizeof(sk_gpu_test::PlatformFence));

// TODO: Implement swap buffers and finish
class VkTestContextImpl : public sk_gpu_test::VkTestContext {
public:
    static VkTestContext* Create(VkTestContext* sharedContext) {
        GrVkBackendContext backendContext;
        GrVkExtensions* extensions;
        VkPhysicalDeviceFeatures2* features;
        bool ownsContext = true;
        VkDebugReportCallbackEXT debugCallback = VK_NULL_HANDLE;
        PFN_vkDestroyDebugReportCallbackEXT destroyCallback = nullptr;
        if (sharedContext) {
            backendContext = sharedContext->getVkBackendContext();
            extensions = const_cast<GrVkExtensions*>(sharedContext->getVkExtensions());
            features = const_cast<VkPhysicalDeviceFeatures2*>(sharedContext->getVkFeatures());
            // We always delete the parent context last, so make sure the child does not
            // think it owns the Vulkan context.
            ownsContext = false;
        } else {
            PFN_vkGetInstanceProcAddr instProc;
            PFN_vkGetDeviceProcAddr devProc;
            if (!sk_gpu_test::LoadVkLibraryAndGetProcAddrFuncs(&instProc, &devProc)) {
                return nullptr;
            }
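            // Route lookups through vkGetDeviceProcAddr when a device is supplied,
            // otherwise through vkGetInstanceProcAddr.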
            auto getProc = [instProc, devProc](const char* proc_name,
                                               VkInstance instance, VkDevice device) {
                if (device != VK_NULL_HANDLE) {
                    return devProc(device, proc_name);
                }
                return instProc(instance, proc_name);
            };
            extensions = new GrVkExtensions();
            features = new VkPhysicalDeviceFeatures2;
            memset(features, 0, sizeof(VkPhysicalDeviceFeatures2));
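            // Build the instance, device, and queue; on failure, release the
            // partially initialized feature and extension structs.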
            if (!sk_gpu_test::CreateVkBackendContext(getProc, &backendContext, extensions,
                                                     features, &debugCallback)) {
                sk_gpu_test::FreeVulkanFeaturesStructs(features);
                delete features;
                delete extensions;
                return nullptr;
            }
            if (debugCallback != VK_NULL_HANDLE) {
                destroyCallback = (PFN_vkDestroyDebugReportCallbackEXT) instProc(
                        backendContext.fInstance, "vkDestroyDebugReportCallbackEXT");
            }
        }
        return new VkTestContextImpl(backendContext, extensions, features, ownsContext,
                                     debugCallback, destroyCallback);
    }

    ~VkTestContextImpl() override { this->teardown(); }

    void testAbandon() override {}

    // There is really nothing to do here since we don't own any unqueued command buffers.
    void submit() override {}

    void finish() override {}

    sk_sp<GrContext> makeGrContext(const GrContextOptions& options) override {
        return GrContext::MakeVulkan(fVk, options);
    }

protected:
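// Loads an instance-level Vulkan entry point into a local grVk<name> pointer and
// returns from the enclosing function if it cannot be acquired.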
#define ACQUIRE_VK_PROC_LOCAL(name, inst)                                            \
    PFN_vk##name grVk##name =                                                        \
            reinterpret_cast<PFN_vk##name>(fVk.fGetProc("vk" #name, inst, nullptr)); \
    do {                                                                             \
        if (grVk##name == nullptr) {                                                 \
            SkDebugf("Function ptr for vk%s could not be acquired\n", #name);        \
            return;                                                                  \
        }                                                                            \
    } while (0)

    void teardown() override {
        INHERITED::teardown();
        fVk.fMemoryAllocator.reset();
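        // Only destroy the Vulkan device and instance if this context created them
        // rather than borrowing them from a shared parent context.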
        if (fOwnsContext) {
            ACQUIRE_VK_PROC_LOCAL(DeviceWaitIdle, fVk.fInstance);
            ACQUIRE_VK_PROC_LOCAL(DestroyDevice, fVk.fInstance);
            ACQUIRE_VK_PROC_LOCAL(DestroyInstance, fVk.fInstance);
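            // Wait for the GPU to go idle before destroying the device and instance.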
            grVkDeviceWaitIdle(fVk.fDevice);
            grVkDestroyDevice(fVk.fDevice, nullptr);
#ifdef SK_ENABLE_VK_LAYERS
            if (fDebugCallback != VK_NULL_HANDLE) {
                fDestroyDebugReportCallbackEXT(fVk.fInstance, fDebugCallback, nullptr);
            }
#endif
            grVkDestroyInstance(fVk.fInstance, nullptr);
            delete fExtensions;

            sk_gpu_test::FreeVulkanFeaturesStructs(fFeatures);
            delete fFeatures;
        }
    }

private:
    VkTestContextImpl(const GrVkBackendContext& backendContext, const GrVkExtensions* extensions,
                      VkPhysicalDeviceFeatures2* features, bool ownsContext,
                      VkDebugReportCallbackEXT debugCallback,
                      PFN_vkDestroyDebugReportCallbackEXT destroyCallback)
            : VkTestContext(backendContext, extensions, features, ownsContext, debugCallback,
                            destroyCallback) {
        fFenceSync.reset(new VkFenceSync(fVk.fGetProc, fVk.fDevice, fVk.fQueue,
                                         fVk.fGraphicsQueueIndex));
    }

    void onPlatformMakeCurrent() const override {}
    std::function<void()> onPlatformGetAutoContextRestore() const override { return nullptr; }
    void onPlatformSwapBuffers() const override {}

    typedef sk_gpu_test::VkTestContext INHERITED;
};
}  // anonymous namespace

namespace sk_gpu_test {
VkTestContext* CreatePlatformVkTestContext(VkTestContext* sharedContext) {
    return VkTestContextImpl::Create(sharedContext);
}
}  // namespace sk_gpu_test

#endif