1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2016 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file
21 * \brief Synchronization operation abstraction
22 *//*--------------------------------------------------------------------*/
23
24 #include "vktSynchronizationOperation.hpp"
25 #include "vkDefs.hpp"
26 #include "vktTestCase.hpp"
27 #include "vktTestCaseUtil.hpp"
28 #include "vkRef.hpp"
29 #include "vkRefUtil.hpp"
30 #include "vkMemUtil.hpp"
31 #include "vkBarrierUtil.hpp"
32 #include "vkQueryUtil.hpp"
33 #include "vkTypeUtil.hpp"
34 #include "vkImageUtil.hpp"
35 #include "vkBuilderUtil.hpp"
36 #include "vkCmdUtil.hpp"
37 #include "vkObjUtil.hpp"
38 #include "deUniquePtr.hpp"
39 #include "tcuTestLog.hpp"
40 #include "tcuTextureUtil.hpp"
41 #include <vector>
42 #include <sstream>
43
44 namespace vkt
45 {
46 namespace synchronization
47 {
48 namespace
49 {
50 using namespace vk;
51
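// These caps appear to mirror the Vulkan minimum guarantees: maxImageDimension2D >= 4096,
// maxUniformBufferRange >= 16384, and the 65536-byte dataSize limit of vkCmdUpdateBuffer.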
52 enum Constants
53 {
54 MAX_IMAGE_DIMENSION_2D = 0x1000u,
55 MAX_UBO_RANGE = 0x4000u,
56 MAX_UPDATE_BUFFER_SIZE = 0x10000u,
57 };
58
59 enum BufferType
60 {
61 BUFFER_TYPE_UNIFORM,
62 BUFFER_TYPE_STORAGE,
63 };
64
65 enum AccessMode
66 {
67 ACCESS_MODE_READ,
68 ACCESS_MODE_WRITE,
69 };
70
71 enum PipelineType
72 {
73 PIPELINE_TYPE_GRAPHICS,
74 PIPELINE_TYPE_COMPUTE,
75 };
76
77 static const char* const s_perVertexBlock = "gl_PerVertex {\n"
78 " vec4 gl_Position;\n"
79 "}";
80
81 static const SyncInfo emptySyncInfo =
82 {
83 0, // VkPipelineStageFlags stageMask;
84 0, // VkAccessFlags accessMask;
85 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
86 };
87
88 std::string getShaderStageName(VkShaderStageFlagBits stage)
89 {
90 switch (stage)
91 {
92 default:
93 DE_FATAL("Unhandled stage!");
94 return "";
95 case VK_SHADER_STAGE_COMPUTE_BIT:
96 return "compute";
97 case VK_SHADER_STAGE_FRAGMENT_BIT:
98 return "fragment";
99 case VK_SHADER_STAGE_VERTEX_BIT:
100 return "vertex";
101 case VK_SHADER_STAGE_GEOMETRY_BIT:
102 return "geometry";
103 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
104 return "tess_control";
105 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
106 return "tess_eval";
107 }
108 }
109
110 //! A pipeline that can be embedded inside an operation.
111 class Pipeline
112 {
113 public:
114 virtual ~Pipeline (void) {}
115 virtual void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet) = 0;
116 };
117
118 //! Vertex data that covers the whole viewport with two triangles.
119 class VertexGrid
120 {
121 public:
122 VertexGrid (OperationContext& context)
123 : m_vertexFormat (VK_FORMAT_R32G32B32A32_SFLOAT)
124 , m_vertexStride (tcu::getPixelSize(mapVkFormat(m_vertexFormat)))
125 {
126 const DeviceInterface& vk = context.getDeviceInterface();
127 const VkDevice device = context.getDevice();
128 Allocator& allocator = context.getAllocator();
129
130 // Vertex positions
131 {
132 m_vertexData.push_back(tcu::Vec4( 1.0f, 1.0f, 0.0f, 1.0f));
133 m_vertexData.push_back(tcu::Vec4(-1.0f, 1.0f, 0.0f, 1.0f));
134 m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
135
136 m_vertexData.push_back(tcu::Vec4(-1.0f, -1.0f, 0.0f, 1.0f));
137 m_vertexData.push_back(tcu::Vec4( 1.0f, -1.0f, 0.0f, 1.0f));
138 m_vertexData.push_back(tcu::Vec4( 1.0f, 1.0f, 0.0f, 1.0f));
139 }
140
141 {
142 const VkDeviceSize vertexDataSizeBytes = m_vertexData.size() * sizeof(m_vertexData[0]);
143
144 m_vertexBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexDataSizeBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
145 DE_ASSERT(sizeof(m_vertexData[0]) == m_vertexStride);
146
147 {
148 const Allocation& alloc = m_vertexBuffer->getAllocation();
149
150 deMemcpy(alloc.getHostPtr(), &m_vertexData[0], static_cast<std::size_t>(vertexDataSizeBytes));
151 flushAlloc(vk, device, alloc);
152 }
153 }
154
155 // Indices
156 {
157 const VkDeviceSize indexBufferSizeBytes = sizeof(deUint32) * m_vertexData.size();
158 const deUint32 numIndices = static_cast<deUint32>(m_vertexData.size());
159
160 m_indexBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(indexBufferSizeBytes, VK_BUFFER_USAGE_INDEX_BUFFER_BIT), MemoryRequirement::HostVisible));
161
162 {
163 const Allocation& alloc = m_indexBuffer->getAllocation();
164 deUint32* const pData = static_cast<deUint32*>(alloc.getHostPtr());
165
166 for (deUint32 i = 0; i < numIndices; ++i)
167 pData[i] = i;
168
169 flushAlloc(vk, device, alloc);
170 }
171 }
172 }
173
174 VkFormat getVertexFormat (void) const { return m_vertexFormat; }
175 deUint32 getVertexStride (void) const { return m_vertexStride; }
176 VkIndexType getIndexType (void) const { return VK_INDEX_TYPE_UINT32; }
177 deUint32 getNumVertices (void) const { return static_cast<deUint32>(m_vertexData.size()); }
178 deUint32 getNumIndices (void) const { return getNumVertices(); }
179 VkBuffer getVertexBuffer (void) const { return **m_vertexBuffer; }
180 VkBuffer getIndexBuffer (void) const { return **m_indexBuffer; }
181
182 private:
183 const VkFormat m_vertexFormat;
184 const deUint32 m_vertexStride;
185 std::vector<tcu::Vec4> m_vertexData;
186 de::MovePtr<Buffer> m_vertexBuffer;
187 de::MovePtr<Buffer> m_indexBuffer;
188 };
189
190 //! Add flags for all shader stages required to support a particular stage (e.g. fragment requires vertex as well).
191 VkShaderStageFlags getRequiredStages (const VkShaderStageFlagBits stage)
192 {
193 VkShaderStageFlags flags = 0;
194
195 DE_ASSERT(stage == VK_SHADER_STAGE_COMPUTE_BIT || (stage & VK_SHADER_STAGE_COMPUTE_BIT) == 0);
196
197 if (stage & VK_SHADER_STAGE_ALL_GRAPHICS)
198 flags |= VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT;
199
200 if (stage & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
201 flags |= VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
202
203 if (stage & VK_SHADER_STAGE_GEOMETRY_BIT)
204 flags |= VK_SHADER_STAGE_GEOMETRY_BIT;
205
206 if (stage & VK_SHADER_STAGE_COMPUTE_BIT)
207 flags |= VK_SHADER_STAGE_COMPUTE_BIT;
208
209 return flags;
210 }
211
212 //! Check that SSBO read/write is available and that all shader stages are supported.
213 void requireFeaturesForSSBOAccess (OperationContext& context, const VkShaderStageFlags usedStages)
214 {
215 const InstanceInterface& vki = context.getInstanceInterface();
216 const VkPhysicalDevice physDevice = context.getPhysicalDevice();
217 FeatureFlags flags = (FeatureFlags)0;
218
219 if (usedStages & VK_SHADER_STAGE_FRAGMENT_BIT)
220 flags |= FEATURE_FRAGMENT_STORES_AND_ATOMICS;
221
222 if (usedStages & (VK_SHADER_STAGE_ALL_GRAPHICS & (~VK_SHADER_STAGE_FRAGMENT_BIT)))
223 flags |= FEATURE_VERTEX_PIPELINE_STORES_AND_ATOMICS;
224
225 if (usedStages & VK_SHADER_STAGE_GEOMETRY_BIT)
226 flags |= FEATURE_GEOMETRY_SHADER;
227
228 if (usedStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
229 flags |= FEATURE_TESSELLATION_SHADER;
230
231 requireFeatures(vki, physDevice, flags);
232 }
233
234 Data getHostBufferData (const OperationContext& context, const Buffer& hostBuffer, const VkDeviceSize size)
235 {
236 const DeviceInterface& vk = context.getDeviceInterface();
237 const VkDevice device = context.getDevice();
238 const Allocation& alloc = hostBuffer.getAllocation();
239 const Data data =
240 {
241 static_cast<std::size_t>(size), // std::size_t size;
242 static_cast<deUint8*>(alloc.getHostPtr()), // const deUint8* data;
243 };
244
245 invalidateAlloc(vk, device, alloc);
246
247 return data;
248 }
249
250 void setHostBufferData (const OperationContext& context, const Buffer& hostBuffer, const Data& data)
251 {
252 const DeviceInterface& vk = context.getDeviceInterface();
253 const VkDevice device = context.getDevice();
254 const Allocation& alloc = hostBuffer.getAllocation();
255
256 deMemcpy(alloc.getHostPtr(), data.data, data.size);
257 flushAlloc(vk, device, alloc);
258 }
259
260 void assertValidShaderStage (const VkShaderStageFlagBits stage)
261 {
262 switch (stage)
263 {
264 case VK_SHADER_STAGE_VERTEX_BIT:
265 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:
266 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT:
267 case VK_SHADER_STAGE_GEOMETRY_BIT:
268 case VK_SHADER_STAGE_FRAGMENT_BIT:
269 case VK_SHADER_STAGE_COMPUTE_BIT:
270 // OK
271 break;
272
273 default:
274 DE_FATAL("Invalid shader stage");
275 break;
276 }
277 }
278
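// Note: the VK_PIPELINE_STAGE_2_*_SHADER bits used below share their values with the legacy
// VK_PIPELINE_STAGE_* equivalents, so they fit into the 32-bit VkPipelineStageFlags return type.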
279 VkPipelineStageFlags pipelineStageFlagsFromShaderStageFlagBits (const VkShaderStageFlagBits shaderStage)
280 {
281 switch (shaderStage)
282 {
283 case VK_SHADER_STAGE_VERTEX_BIT: return VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR;
284 case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT: return VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR;
285 case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: return VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR;
286 case VK_SHADER_STAGE_GEOMETRY_BIT: return VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR;
287 case VK_SHADER_STAGE_FRAGMENT_BIT: return VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR;
288 case VK_SHADER_STAGE_COMPUTE_BIT: return VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR;
289
290 // Other usages are probably an error, so flag that.
291 default:
292 DE_FATAL("Invalid shader stage");
293 return (VkPipelineStageFlags)0;
294 }
295 }
296
297 //! Fill destination buffer with a repeating pattern.
298 void fillPattern (void* const pData, const VkDeviceSize size, bool useIndexPattern = false)
299 {
300 // There are two pattern options - most operations use primePattern,
301 // indexPattern is only needed for testing the vertex index buffer.
302 static const deUint8 primePattern[] = { 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31 };
303 static const deUint32 indexPattern[] = { 0, 1, 2, 3, 4 };
304
305 const deUint8* pattern = (useIndexPattern ? reinterpret_cast<const deUint8*>(indexPattern)
306 : primePattern);
307 const deUint32 patternSize = static_cast<deUint32>(useIndexPattern
308 ? DE_LENGTH_OF_ARRAY(indexPattern)*sizeof(deUint32)
309 : DE_LENGTH_OF_ARRAY(primePattern));
310 deUint8* const pBytes = static_cast<deUint8*>(pData);
311
312 for (deUint32 i = 0; i < size; ++i)
313 pBytes[i] = pattern[i % patternSize];
314 }
315
316 //! Get size in bytes of a pixel buffer with given extent.
317 VkDeviceSize getPixelBufferSize (const VkFormat format, const VkExtent3D& extent)
318 {
319 const int pixelSize = tcu::getPixelSize(mapVkFormat(format));
320 return (pixelSize * extent.width * extent.height * extent.depth);
321 }
322
323 //! Determine the size of a 2D image that can hold sizeBytes data.
324 VkExtent3D get2DImageExtentWithSize (const VkDeviceSize sizeBytes, const deUint32 pixelSize)
325 {
326 const deUint32 size = static_cast<deUint32>(sizeBytes / pixelSize);
327
328 DE_ASSERT(size <= MAX_IMAGE_DIMENSION_2D * MAX_IMAGE_DIMENSION_2D);
329
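// Lay the pixels out in rows of at most MAX_IMAGE_DIMENSION_2D, rounding the row count up
// so the image can hold at least 'size' pixels.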
330 return makeExtent3D(
331 std::min(size, static_cast<deUint32>(MAX_IMAGE_DIMENSION_2D)),
332 (size / MAX_IMAGE_DIMENSION_2D) + (size % MAX_IMAGE_DIMENSION_2D != 0 ? 1u : 0u),
333 1u);
334 }
335
336 VkClearValue makeClearValue (const VkFormat format)
337 {
338 if (isDepthStencilFormat(format))
339 return makeClearValueDepthStencil(0.4f, 21u);
340 else
341 {
342 if (isIntFormat(format) || isUintFormat(format))
343 return makeClearValueColorU32(8u, 16u, 24u, 32u);
344 else
345 return makeClearValueColorF32(0.25f, 0.49f, 0.75f, 1.0f);
346 }
347 }
348
349 void clearPixelBuffer (tcu::PixelBufferAccess& pixels, const VkClearValue& clearValue)
350 {
351 const tcu::TextureFormat format = pixels.getFormat();
352 const tcu::TextureChannelClass channelClass = tcu::getTextureChannelClass(format.type);
353
354 if (format.order == tcu::TextureFormat::D)
355 {
356 for (int z = 0; z < pixels.getDepth(); z++)
357 for (int y = 0; y < pixels.getHeight(); y++)
358 for (int x = 0; x < pixels.getWidth(); x++)
359 pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
360 }
361 else if (format.order == tcu::TextureFormat::S)
362 {
363 for (int z = 0; z < pixels.getDepth(); z++)
364 for (int y = 0; y < pixels.getHeight(); y++)
365 for (int x = 0; x < pixels.getWidth(); x++)
366 pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
367 }
368 else if (format.order == tcu::TextureFormat::DS)
369 {
370 for (int z = 0; z < pixels.getDepth(); z++)
371 for (int y = 0; y < pixels.getHeight(); y++)
372 for (int x = 0; x < pixels.getWidth(); x++)
373 {
374 pixels.setPixDepth(clearValue.depthStencil.depth, x, y, z);
375 pixels.setPixStencil(clearValue.depthStencil.stencil, x, y, z);
376 }
377 }
378 else if (channelClass == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER || channelClass == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER)
379 {
380 const tcu::UVec4 color (clearValue.color.uint32);
381
382 for (int z = 0; z < pixels.getDepth(); z++)
383 for (int y = 0; y < pixels.getHeight(); y++)
384 for (int x = 0; x < pixels.getWidth(); x++)
385 pixels.setPixel(color, x, y, z);
386 }
387 else
388 {
389 const tcu::Vec4 color (clearValue.color.float32);
390
391 for (int z = 0; z < pixels.getDepth(); z++)
392 for (int y = 0; y < pixels.getHeight(); y++)
393 for (int x = 0; x < pixels.getWidth(); x++)
394 pixels.setPixel(color, x, y, z);
395 }
396 }
397
398 VkImageViewType getImageViewType (const VkImageType imageType)
399 {
400 switch (imageType)
401 {
402 case VK_IMAGE_TYPE_1D: return VK_IMAGE_VIEW_TYPE_1D;
403 case VK_IMAGE_TYPE_2D: return VK_IMAGE_VIEW_TYPE_2D;
404 case VK_IMAGE_TYPE_3D: return VK_IMAGE_VIEW_TYPE_3D;
405
406 default:
407 DE_FATAL("Unknown image type");
408 return VK_IMAGE_VIEW_TYPE_LAST;
409 }
410 }
411
412 std::string getShaderImageType (const VkFormat format, const VkImageType imageType)
413 {
414 const tcu::TextureFormat texFormat = mapVkFormat(format);
415 const std::string formatPart = tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_UNSIGNED_INTEGER ? "u" :
416 tcu::getTextureChannelClass(texFormat.type) == tcu::TEXTURECHANNELCLASS_SIGNED_INTEGER ? "i" : "";
417 switch (imageType)
418 {
419 case VK_IMAGE_TYPE_1D: return formatPart + "image1D";
420 case VK_IMAGE_TYPE_2D: return formatPart + "image2D";
421 case VK_IMAGE_TYPE_3D: return formatPart + "image3D";
422
423 default:
424 DE_FATAL("Unknown image type");
425 return DE_NULL;
426 }
427 }
428
429 std::string getShaderImageFormatQualifier (const VkFormat format)
430 {
431 const tcu::TextureFormat texFormat = mapVkFormat(format);
432 const char* orderPart = DE_NULL;
433 const char* typePart = DE_NULL;
434
435 switch (texFormat.order)
436 {
437 case tcu::TextureFormat::R: orderPart = "r"; break;
438 case tcu::TextureFormat::RG: orderPart = "rg"; break;
439 case tcu::TextureFormat::RGB: orderPart = "rgb"; break;
440 case tcu::TextureFormat::RGBA: orderPart = "rgba"; break;
441
442 default:
443 DE_FATAL("Unksupported texture channel order");
444 break;
445 }
446
447 switch (texFormat.type)
448 {
449 case tcu::TextureFormat::FLOAT: typePart = "32f"; break;
450 case tcu::TextureFormat::HALF_FLOAT: typePart = "16f"; break;
451
452 case tcu::TextureFormat::UNSIGNED_INT32: typePart = "32ui"; break;
453 case tcu::TextureFormat::UNSIGNED_INT16: typePart = "16ui"; break;
454 case tcu::TextureFormat::UNSIGNED_INT8: typePart = "8ui"; break;
455
456 case tcu::TextureFormat::SIGNED_INT32: typePart = "32i"; break;
457 case tcu::TextureFormat::SIGNED_INT16: typePart = "16i"; break;
458 case tcu::TextureFormat::SIGNED_INT8: typePart = "8i"; break;
459
460 case tcu::TextureFormat::UNORM_INT16: typePart = "16"; break;
461 case tcu::TextureFormat::UNORM_INT8: typePart = "8"; break;
462
463 case tcu::TextureFormat::SNORM_INT16: typePart = "16_snorm"; break;
464 case tcu::TextureFormat::SNORM_INT8: typePart = "8_snorm"; break;
465
466 default:
467 DE_FATAL("Unksupported texture channel type");
468 break;
469 }
470
471 return std::string(orderPart) + typePart;
472 }
473
474 namespace FillUpdateBuffer
475 {
476
477 enum BufferOp
478 {
479 BUFFER_OP_FILL,
480 BUFFER_OP_UPDATE,
481 BUFFER_OP_UPDATE_WITH_INDEX_PATTERN,
482 };
483
484 class Implementation : public Operation
485 {
486 public:
487 Implementation (OperationContext& context, Resource& resource, const BufferOp bufferOp)
488 : m_context (context)
489 , m_resource (resource)
490 , m_fillValue (0x13)
491 , m_bufferOp (bufferOp)
492 {
493 DE_ASSERT((m_resource.getBuffer().size % sizeof(deUint32)) == 0);
494 DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_resource.getBuffer().size <= MAX_UPDATE_BUFFER_SIZE);
495
496 m_data.resize(static_cast<size_t>(m_resource.getBuffer().size));
497
498 if (m_bufferOp == BUFFER_OP_FILL)
499 {
500 const std::size_t size = m_data.size() / sizeof(m_fillValue);
501 deUint32* const pData = reinterpret_cast<deUint32*>(&m_data[0]);
502
503 for (deUint32 i = 0; i < size; ++i)
504 pData[i] = m_fillValue;
505 }
506 else if (m_bufferOp == BUFFER_OP_UPDATE)
507 {
508 fillPattern(&m_data[0], m_data.size());
509 }
510 else if(m_bufferOp == BUFFER_OP_UPDATE_WITH_INDEX_PATTERN)
511 {
512 fillPattern(&m_data[0], m_data.size(), true);
513 }
514 }
515
516 void recordCommands (const VkCommandBuffer cmdBuffer)
517 {
518 const DeviceInterface& vk = m_context.getDeviceInterface();
519
520 if (m_bufferOp == BUFFER_OP_FILL)
521 {
522 vk.cmdFillBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size, m_fillValue);
523
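// Make the fill result visible to later transfer reads from this buffer.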
524 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
525 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
526 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
527 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
528 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
529 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
530 m_resource.getBuffer().handle, // VkBuffer buffer
531 0u, // VkDeviceSize offset
532 m_resource.getBuffer().size // VkDeviceSize size
533 );
534 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
535 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
536 }
537 else
538 vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size, reinterpret_cast<deUint32*>(&m_data[0]));
539 }
540
541 SyncInfo getInSyncInfo (void) const
542 {
543 return emptySyncInfo;
544 }
545
546 SyncInfo getOutSyncInfo (void) const
547 {
548 const SyncInfo syncInfo =
549 {
550 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
551 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
552 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
553 };
554
555 return syncInfo;
556 }
557
558 Data getData (void) const
559 {
560 const Data data =
561 {
562 m_data.size(), // std::size_t size;
563 &m_data[0], // const deUint8* data;
564 };
565 return data;
566 }
567
568 void setData (const Data& data)
569 {
570 deMemcpy(&m_data[0], data.data, data.size);
571 }
572
573 private:
574 OperationContext& m_context;
575 Resource& m_resource;
576 std::vector<deUint8> m_data;
577 const deUint32 m_fillValue;
578 const BufferOp m_bufferOp;
579 };
580
581 class Support : public OperationSupport
582 {
583 public:
584 Support (const ResourceDescription& resourceDesc, const BufferOp bufferOp)
585 : m_resourceDesc (resourceDesc)
586 , m_bufferOp (bufferOp)
587 {
588 DE_ASSERT(m_bufferOp == BUFFER_OP_FILL || m_bufferOp == BUFFER_OP_UPDATE || m_bufferOp == BUFFER_OP_UPDATE_WITH_INDEX_PATTERN);
589 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER || m_resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER);
590 }
591
592 deUint32 getInResourceUsageFlags (void) const
593 {
594 return 0;
595 }
596
597 deUint32 getOutResourceUsageFlags (void) const
598 {
599 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
600 }
601
602 VkQueueFlags getQueueFlags (const OperationContext& context) const
603 {
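// Without VK_KHR_maintenance1, vkCmdFillBuffer is not allowed on transfer-only queues.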
604 if (m_bufferOp == BUFFER_OP_FILL && !context.isDeviceFunctionalitySupported("VK_KHR_maintenance1"))
605 {
606 return VK_QUEUE_COMPUTE_BIT | VK_QUEUE_GRAPHICS_BIT;
607 }
608
609 return VK_QUEUE_TRANSFER_BIT;
610 }
611
612 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
613 {
614 return de::MovePtr<Operation>(new Implementation(context, resource, m_bufferOp));
615 }
616
617 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
618 {
619 DE_ASSERT(0);
620 return de::MovePtr<Operation>();
621 }
622
623 private:
624 const ResourceDescription m_resourceDesc;
625 const BufferOp m_bufferOp;
626 };
627
628 } // FillUpdateBuffer ns
629
630 namespace CopyBuffer
631 {
632
633 class Implementation : public Operation
634 {
635 public:
636 Implementation (OperationContext& context, Resource& resource, const AccessMode mode)
637 : m_context (context)
638 , m_resource (resource)
639 , m_mode (mode)
640 {
641 const DeviceInterface& vk = m_context.getDeviceInterface();
642 const VkDevice device = m_context.getDevice();
643 Allocator& allocator = m_context.getAllocator();
644 const VkBufferUsageFlags hostBufferUsage = (m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
645
646 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, hostBufferUsage), MemoryRequirement::HostVisible));
647
648 const Allocation& alloc = m_hostBuffer->getAllocation();
649
650 if (m_mode == ACCESS_MODE_READ)
651 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
652 else
653 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
654
655 flushAlloc(vk, device, alloc);
656 }
657
658 void recordCommands (const VkCommandBuffer cmdBuffer)
659 {
660 const DeviceInterface& vk = m_context.getDeviceInterface();
661 const VkBufferCopy copyRegion = makeBufferCopy(0u, 0u, m_resource.getBuffer().size);
662 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
663
664 if (m_mode == ACCESS_MODE_READ)
665 {
666 vk.cmdCopyBuffer(cmdBuffer, m_resource.getBuffer().handle, **m_hostBuffer, 1u, &copyRegion);
667
668 // Insert a barrier so copied data is available to the host
669 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
670 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
671 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
672 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
673 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
674 **m_hostBuffer, // VkBuffer buffer
675 0u, // VkDeviceSize offset
676 m_resource.getBuffer().size // VkDeviceSize size
677 );
678 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
679 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
680 }
681 else
682 {
683 // Insert a barrier so buffer data is available to the device
684 //const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
685 // VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
686 // VK_ACCESS_2_HOST_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
687 // VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
688 // VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
689 // **m_hostBuffer, // VkBuffer buffer
690 // 0u, // VkDeviceSize offset
691 // m_resource.getBuffer().size // VkDeviceSize size
692 //);
693 //VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
694 //synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
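// The barrier above is left disabled, presumably because vkQueueSubmit already makes prior
// host writes visible to the device, so no explicit host-write barrier is needed here.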
695
696 vk.cmdCopyBuffer(cmdBuffer, **m_hostBuffer, m_resource.getBuffer().handle, 1u, &copyRegion);
697 }
698 }
699
700 SyncInfo getInSyncInfo (void) const
701 {
702 const VkAccessFlags access = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : 0);
703 const SyncInfo syncInfo =
704 {
705 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
706 access, // VkAccessFlags accessMask;
707 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
708 };
709 return syncInfo;
710 }
711
712 SyncInfo getOutSyncInfo (void) const
713 {
714 const VkAccessFlags access = (m_mode == ACCESS_MODE_WRITE ? VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR : 0);
715 const SyncInfo syncInfo =
716 {
717 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
718 access, // VkAccessFlags accessMask;
719 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
720 };
721 return syncInfo;
722 }
723
724 Data getData (void) const
725 {
726 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
727 }
728
729 void setData (const Data& data)
730 {
731 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
732 setHostBufferData(m_context, *m_hostBuffer, data);
733 }
734
735 private:
736 OperationContext& m_context;
737 Resource& m_resource;
738 const AccessMode m_mode;
739 de::MovePtr<Buffer> m_hostBuffer;
740 };
741
742 class Support : public OperationSupport
743 {
744 public:
745 Support (const ResourceDescription& resourceDesc, const AccessMode mode)
746 : m_mode (mode)
747 {
748 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_BUFFER);
749 DE_UNREF(resourceDesc);
750 }
751
752 deUint32 getInResourceUsageFlags (void) const
753 {
754 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
755 }
756
757 deUint32 getOutResourceUsageFlags (void) const
758 {
759 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
760 }
761
762 VkQueueFlags getQueueFlags (const OperationContext& context) const
763 {
764 DE_UNREF(context);
765 return VK_QUEUE_TRANSFER_BIT;
766 }
767
768 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
769 {
770 return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
771 }
772
773 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
774 {
775 DE_ASSERT(0);
776 return de::MovePtr<Operation>();
777 }
778
779 private:
780 const AccessMode m_mode;
781 };
782
783 class CopyImplementation : public Operation
784 {
785 public:
786 CopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
787 : m_context (context)
788 , m_inResource (inResource)
789 , m_outResource (outResource)
790 {
791 }
792
793 void recordCommands (const VkCommandBuffer cmdBuffer)
794 {
795 const DeviceInterface& vk = m_context.getDeviceInterface();
796 const VkBufferCopy copyRegion = makeBufferCopy(0u, 0u, m_inResource.getBuffer().size);
797
798 vk.cmdCopyBuffer(cmdBuffer, m_inResource.getBuffer().handle, m_outResource.getBuffer().handle, 1u, &copyRegion);
799 }
800
801 SyncInfo getInSyncInfo (void) const
802 {
803 const SyncInfo syncInfo =
804 {
805 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
806 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
807 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
808 };
809 return syncInfo;
810 }
811
812 SyncInfo getOutSyncInfo (void) const
813 {
814 const SyncInfo syncInfo =
815 {
816 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
817 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
818 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
819 };
820 return syncInfo;
821 }
822
823 Data getData (void) const
824 {
825 Data data = { 0, DE_NULL };
826 return data;
827 }
828
829 void setData (const Data&)
830 {
831 DE_ASSERT(0);
832 }
833
834 private:
835 OperationContext& m_context;
836 Resource& m_inResource;
837 Resource& m_outResource;
838 de::MovePtr<Buffer> m_hostBuffer;
839 };
840
841 class CopySupport : public OperationSupport
842 {
843 public:
844 CopySupport (const ResourceDescription& resourceDesc)
845 {
846 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_BUFFER);
847 DE_UNREF(resourceDesc);
848 }
849
850 deUint32 getInResourceUsageFlags (void) const
851 {
852 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
853 }
854
855 deUint32 getOutResourceUsageFlags (void) const
856 {
857 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
858 }
859
860 VkQueueFlags getQueueFlags (const OperationContext& context) const
861 {
862 DE_UNREF(context);
863 return VK_QUEUE_TRANSFER_BIT;
864 }
865
866 de::MovePtr<Operation> build (OperationContext&, Resource&) const
867 {
868 DE_ASSERT(0);
869 return de::MovePtr<Operation>();
870 }
871
872 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
873 {
874 return de::MovePtr<Operation>(new CopyImplementation(context, inResource, outResource));
875 }
876 };
877
878 } // CopyBuffer ns
879
880 namespace CopyBlitResolveImage
881 {
882
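//! Base class for copy/blit/resolve operations: data moves between a host-visible buffer
//! and the resource image via an intermediate staging image.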
883 class ImplementationBase : public Operation
884 {
885 public:
886 //! Copy/Blit/Resolve etc. operation
887 virtual void recordCopyCommand (const VkCommandBuffer cmdBuffer) = 0;
888
889 //! Get the source stage mask used during reads; added to exercise the new synchronization2 stage masks.
890 virtual VkPipelineStageFlags2KHR getReadSrcStageMask() const = 0;
891
892 ImplementationBase (OperationContext& context, Resource& resource, const AccessMode mode)
893 : m_context (context)
894 , m_resource (resource)
895 , m_mode (mode)
896 , m_bufferSize (getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
897 {
898 const DeviceInterface& vk = m_context.getDeviceInterface();
899 const VkDevice device = m_context.getDevice();
900 Allocator& allocator = m_context.getAllocator();
901
902 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
903 vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
904 MemoryRequirement::HostVisible));
905
906 const Allocation& alloc = m_hostBuffer->getAllocation();
907 if (m_mode == ACCESS_MODE_READ)
908 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
909 else
910 fillPattern(alloc.getHostPtr(), m_bufferSize);
911 flushAlloc(vk, device, alloc);
912
913 // Staging image
914 m_image = de::MovePtr<Image>(new Image(
915 vk, device, allocator,
916 makeImageCreateInfo(m_resource.getImage().imageType, m_resource.getImage().extent, m_resource.getImage().format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT),
917 MemoryRequirement::Any));
918 }
919
920 void recordCommands (const VkCommandBuffer cmdBuffer)
921 {
922 const DeviceInterface& vk = m_context.getDeviceInterface();
923 const VkBufferImageCopy bufferCopyRegion = makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
924 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
925
926 // Staging image layout
927 {
928 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
929 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
930 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
931 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
932 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
933 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
934 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
935 **m_image, // VkImage image
936 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
937 );
938 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
939 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
940 }
941
942 if (m_mode == ACCESS_MODE_READ)
943 {
944 // Resource Image -> Staging image
945 recordCopyCommand(cmdBuffer);
946
947 // Staging image layout
948 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
949 getReadSrcStageMask(), // VkPipelineStageFlags2KHR srcStageMask
950 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
951 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
952 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
953 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
954 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
955 **m_image, // VkImage image
956 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
957 );
958 VkDependencyInfoKHR imageDependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
959 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &imageDependencyInfo);
960
961 // Image -> Host buffer
962 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &bufferCopyRegion);
963
964 // Insert a barrier so copied data is available to the host
965 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
966 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
967 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
968 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
969 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
970 **m_hostBuffer, // VkBuffer buffer
971 0u, // VkDeviceSize offset
972 m_bufferSize // VkDeviceSize size
973 );
974 VkDependencyInfoKHR bufferDependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
975 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &bufferDependencyInfo);
976 }
977 else
978 {
979 // Host buffer -> Staging image
980 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &bufferCopyRegion);
981
982 // Staging image layout
983 {
984 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
985 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
986 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
987 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
988 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
989 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
990 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
991 **m_image, // VkImage image
992 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
993 );
994 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
995 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
996 }
997
998 // Resource image layout
999 {
1000 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1001 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
1002 (VkAccessFlags2KHR)0, // VkAccessFlags2KHR srcAccessMask
1003 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
1004 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1005 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1006 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1007 m_resource.getImage().handle, // VkImage image
1008 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1009 );
1010 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1011 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1012 }
1013
1014 // Staging image -> Resource Image
1015 recordCopyCommand(cmdBuffer);
1016 }
1017 }
1018
1019 SyncInfo getInSyncInfo (void) const
1020 {
1021 const VkAccessFlags2KHR access = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR);
1022 const VkImageLayout layout = (m_mode == ACCESS_MODE_READ ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1023 const SyncInfo syncInfo =
1024 {
1025 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
1026 access, // VkAccessFlags accessMask;
1027 layout, // VkImageLayout imageLayout;
1028 };
1029 return syncInfo;
1030 }
1031
1032 SyncInfo getOutSyncInfo (void) const
1033 {
1034 const VkAccessFlags2KHR access = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_TRANSFER_READ_BIT_KHR : VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR);
1035 const VkImageLayout layout = (m_mode == ACCESS_MODE_READ ? VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL : VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
1036 const SyncInfo syncInfo =
1037 {
1038 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
1039 access, // VkAccessFlags accessMask;
1040 layout, // VkImageLayout imageLayout;
1041 };
1042 return syncInfo;
1043 }
1044
1045 Data getData (void) const
1046 {
1047 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
1048 }
1049
1050 void setData (const Data& data)
1051 {
1052 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
1053 setHostBufferData(m_context, *m_hostBuffer, data);
1054 }
1055
1056 protected:
1057 OperationContext& m_context;
1058 Resource& m_resource;
1059 const AccessMode m_mode;
1060 const VkDeviceSize m_bufferSize;
1061 de::MovePtr<Buffer> m_hostBuffer;
1062 de::MovePtr<Image> m_image;
1063 };
1064
1065 VkOffset3D makeExtentOffset (const Resource& resource)
1066 {
1067 DE_ASSERT(resource.getType() == RESOURCE_TYPE_IMAGE);
1068 const VkExtent3D extent = resource.getImage().extent;
1069
1070 switch (resource.getImage().imageType)
1071 {
1072 case VK_IMAGE_TYPE_1D: return makeOffset3D(extent.width, 1, 1);
1073 case VK_IMAGE_TYPE_2D: return makeOffset3D(extent.width, extent.height, 1);
1074 case VK_IMAGE_TYPE_3D: return makeOffset3D(extent.width, extent.height, extent.depth);
1075 default:
1076 DE_ASSERT(0);
1077 return VkOffset3D();
1078 }
1079 }
1080
1081 VkImageBlit makeBlitRegion (const Resource& resource)
1082 {
1083 const VkImageBlit blitRegion =
1084 {
1085 resource.getImage().subresourceLayers, // VkImageSubresourceLayers srcSubresource;
1086 { makeOffset3D(0, 0, 0), makeExtentOffset(resource) }, // VkOffset3D srcOffsets[2];
1087 resource.getImage().subresourceLayers, // VkImageSubresourceLayers dstSubresource;
1088 { makeOffset3D(0, 0, 0), makeExtentOffset(resource) }, // VkOffset3D dstOffsets[2];
1089 };
1090 return blitRegion;
1091 }
1092
1093 class BlitImplementation : public ImplementationBase
1094 {
1095 public:
1096 BlitImplementation (OperationContext& context, Resource& resource, const AccessMode mode)
1097 : ImplementationBase (context, resource, mode)
1098 , m_blitRegion (makeBlitRegion(m_resource))
1099 {
1100 const InstanceInterface& vki = m_context.getInstanceInterface();
1101 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1102 const VkFormatProperties formatProps = getPhysicalDeviceFormatProperties(vki, physDevice, m_resource.getImage().format);
1103 const VkFormatFeatureFlags requiredFlags = (VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT);
1104
1105 // SRC and DST blit support is required because both images use the same format.
1106 if ((formatProps.optimalTilingFeatures & requiredFlags) != requiredFlags)
1107 TCU_THROW(NotSupportedError, "Format doesn't support blits");
1108 }
1109
1110 void recordCopyCommand (const VkCommandBuffer cmdBuffer)
1111 {
1112 const DeviceInterface& vk = m_context.getDeviceInterface();
1113
1114 if (m_mode == ACCESS_MODE_READ)
1115 {
1116 // Resource Image -> Staging image
1117 vk.cmdBlitImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1118 1u, &m_blitRegion, VK_FILTER_NEAREST);
1119 }
1120 else
1121 {
1122 // Staging image -> Resource Image
1123 vk.cmdBlitImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1124 1u, &m_blitRegion, VK_FILTER_NEAREST);
1125 }
1126 }
1127
1128 VkPipelineStageFlags2KHR getReadSrcStageMask() const
1129 {
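// synchronization2 adds fine-grained transfer stages (BLIT/COPY/RESOLVE) alongside the single TRANSFER stage used by legacy synchronization.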
1130 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ? VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR : VK_PIPELINE_STAGE_2_BLIT_BIT_KHR;
1131 }
1132
1133
1134 private:
1135 const VkImageBlit m_blitRegion;
1136 };
1137
1138 template <typename ImageCopyOrResolve>
1139 ImageCopyOrResolve makeImageRegion (const Resource& resource)
1140 {
1141 return
1142 {
1143 resource.getImage().subresourceLayers, // VkImageSubresourceLayers srcSubresource;
1144 makeOffset3D(0, 0, 0), // VkOffset3D srcOffset;
1145 resource.getImage().subresourceLayers, // VkImageSubresourceLayers dstSubresource;
1146 makeOffset3D(0, 0, 0), // VkOffset3D dstOffset;
1147 resource.getImage().extent, // VkExtent3D extent;
1148 };
1149 }
1150
1151 class CopyImplementation : public ImplementationBase
1152 {
1153 public:
1154 CopyImplementation (OperationContext& context, Resource& resource, const AccessMode mode)
1155 : ImplementationBase (context, resource, mode)
1156 , m_imageCopyRegion (makeImageRegion<VkImageCopy>(m_resource))
1157 {
1158 }
1159
1160 void recordCopyCommand (const VkCommandBuffer cmdBuffer)
1161 {
1162 const DeviceInterface& vk = m_context.getDeviceInterface();
1163
1164 if (m_mode == ACCESS_MODE_READ)
1165 {
1166 // Resource Image -> Staging image
1167 vk.cmdCopyImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1168 }
1169 else
1170 {
1171 // Staging image -> Resource Image
1172 vk.cmdCopyImage(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageCopyRegion);
1173 }
1174 }
1175
1176 VkPipelineStageFlags2KHR getReadSrcStageMask() const
1177 {
1178 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ? VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR : VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
1179 }
1180
1181 private:
1182 const VkImageCopy m_imageCopyRegion;
1183 };
1184
1185 class ResolveImplementation : public ImplementationBase
1186 {
1187 public:
1188 ResolveImplementation(OperationContext& context, Resource& resource, const AccessMode mode)
1189 : ImplementationBase (context, resource, mode)
1190 , m_imageResolveRegion (makeImageRegion<VkImageResolve>(resource))
1191 {
1192 DE_ASSERT(m_mode == ACCESS_MODE_READ);
1193 }
1194
1195 void recordCopyCommand(const VkCommandBuffer cmdBuffer)
1196 {
1197 const DeviceInterface& vk = m_context.getDeviceInterface();
1198
1199 // Resource Image -> Staging image
1200 vk.cmdResolveImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &m_imageResolveRegion);
1201 }
1202
1203 VkPipelineStageFlags2KHR getReadSrcStageMask() const
1204 {
1205 return (m_context.getSynchronizationType() == SynchronizationType::LEGACY) ? VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR : VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR;
1206 }
1207
1208 private:
1209 VkImageResolve m_imageResolveRegion;
1210 };
1211
1212 enum Type
1213 {
1214 TYPE_COPY,
1215 TYPE_BLIT,
1216 TYPE_RESOLVE,
1217 };
1218
1219 class Support : public OperationSupport
1220 {
1221 public:
1222 Support (const ResourceDescription& resourceDesc, const Type type, const AccessMode mode)
1223 : m_type (type)
1224 , m_mode (mode)
1225 {
1226 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_IMAGE);
1227
1228 const bool isDepthStencil = isDepthStencilFormat(resourceDesc.imageFormat);
1229 m_requiredQueueFlags = (isDepthStencil || m_type != TYPE_COPY ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT);
1230
1231 // Don't blit depth/stencil images.
1232 DE_ASSERT(m_type != TYPE_BLIT || !isDepthStencil);
1233 }
1234
1235 deUint32 getInResourceUsageFlags (void) const
1236 {
1237 return (m_mode == ACCESS_MODE_READ ? VK_IMAGE_USAGE_TRANSFER_SRC_BIT : 0);
1238 }
1239
1240 deUint32 getOutResourceUsageFlags (void) const
1241 {
1242 return (m_mode == ACCESS_MODE_WRITE ? VK_IMAGE_USAGE_TRANSFER_DST_BIT : 0);
1243 }
1244
1245 VkQueueFlags getQueueFlags (const OperationContext& context) const
1246 {
1247 DE_UNREF(context);
1248 return m_requiredQueueFlags;
1249 }
1250
1251 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
1252 {
1253 if (m_type == TYPE_COPY)
1254 return de::MovePtr<Operation>(new CopyImplementation(context, resource, m_mode));
1255 else if (m_type == TYPE_BLIT)
1256 return de::MovePtr<Operation>(new BlitImplementation(context, resource, m_mode));
1257 else
1258 return de::MovePtr<Operation>(new ResolveImplementation(context, resource, m_mode));
1259 }
1260
1261 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
1262 {
1263 DE_ASSERT(0);
1264 return de::MovePtr<Operation>();
1265 }
1266
1267 private:
1268 const Type m_type;
1269 const AccessMode m_mode;
1270 VkQueueFlags m_requiredQueueFlags;
1271 };
1272
1273 class BlitCopyImplementation : public Operation
1274 {
1275 public:
1276 BlitCopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
1277 : m_context (context)
1278 , m_inResource (inResource)
1279 , m_outResource (outResource)
1280 , m_blitRegion (makeBlitRegion(m_inResource))
1281 {
1282 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
1283 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
1284
1285 const InstanceInterface& vki = m_context.getInstanceInterface();
1286 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1287 const VkFormatProperties formatProps = getPhysicalDeviceFormatProperties(vki, physDevice, m_inResource.getImage().format);
1288 const VkFormatFeatureFlags requiredFlags = (VK_FORMAT_FEATURE_BLIT_SRC_BIT | VK_FORMAT_FEATURE_BLIT_DST_BIT);
1289
1290 // SRC and DST blit support is required because both images use the same format.
1291 if ((formatProps.optimalTilingFeatures & requiredFlags) != requiredFlags)
1292 TCU_THROW(NotSupportedError, "Format doesn't support blits");
1293 }
1294
1295 void recordCommands (const VkCommandBuffer cmdBuffer)
1296 {
1297 const DeviceInterface& vk = m_context.getDeviceInterface();
1298 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
1299
1300 {
1301 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1302 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1303 (VkAccessFlags2KHR)0, // VkAccessFlags2KHR srcAccessMask
1304 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1305 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1306 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1307 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1308 m_outResource.getImage().handle, // VkImage image
1309 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1310 );
1311 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1312 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1313 }
1314
1315 vk.cmdBlitImage(cmdBuffer,
1316 m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1317 m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1318 1u, &m_blitRegion, VK_FILTER_NEAREST);
1319 }
1320
1321 SyncInfo getInSyncInfo (void) const
1322 {
1323 const SyncInfo syncInfo =
1324 {
1325 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1326 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
1327 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
1328 };
1329 return syncInfo;
1330 }
1331
1332 SyncInfo getOutSyncInfo (void) const
1333 {
1334 const SyncInfo syncInfo =
1335 {
1336 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1337 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
1338 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
1339 };
1340 return syncInfo;
1341 }
1342
1343 Data getData (void) const
1344 {
1345 Data data = { 0, DE_NULL };
1346 return data;
1347 }
1348
1349 void setData (const Data&)
1350 {
1351 DE_ASSERT(0);
1352 }
1353
1354 private:
1355 OperationContext& m_context;
1356 Resource& m_inResource;
1357 Resource& m_outResource;
1358 const VkImageBlit m_blitRegion;
1359 };
1360
1361 class CopyCopyImplementation : public Operation
1362 {
1363 public:
1364 CopyCopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
1365 : m_context (context)
1366 , m_inResource (inResource)
1367 , m_outResource (outResource)
1368 , m_imageCopyRegion (makeImageRegion<VkImageCopy>(m_inResource))
1369 {
1370 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
1371 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
1372 }
1373
1374 void recordCommands (const VkCommandBuffer cmdBuffer)
1375 {
1376 const DeviceInterface& vk = m_context.getDeviceInterface();
1377 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
1378
1379 {
1380 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1381 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1382 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1383 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1384 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1385 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1386 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1387 m_outResource.getImage().handle, // VkImage image
1388 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1389 );
1390 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1391 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1392 }
1393
1394 vk.cmdCopyImage(cmdBuffer,
1395 m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1396 m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1397 1u, &m_imageCopyRegion);
1398 }
1399
1400 SyncInfo getInSyncInfo (void) const
1401 {
1402 const SyncInfo syncInfo =
1403 {
1404 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1405 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
1406 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
1407 };
1408 return syncInfo;
1409 }
1410
1411 SyncInfo getOutSyncInfo (void) const
1412 {
1413 const SyncInfo syncInfo =
1414 {
1415 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags stageMask;
1416 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
1417 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
1418 };
1419 return syncInfo;
1420 }
1421
1422 Data getData (void) const
1423 {
1424 Data data = { 0, DE_NULL };
1425 return data;
1426 }
1427
1428 void setData (const Data&)
1429 {
1430 DE_ASSERT(0);
1431 }
1432
1433 private:
1434 OperationContext& m_context;
1435 Resource& m_inResource;
1436 Resource& m_outResource;
1437 const VkImageCopy m_imageCopyRegion;
1438 };
1439
1440 class CopySupport : public OperationSupport
1441 {
1442 public:
1443 CopySupport (const ResourceDescription& resourceDesc, const Type type)
1444 : m_type (type)
1445 {
1446 DE_ASSERT(resourceDesc.type == RESOURCE_TYPE_IMAGE);
1447
1448 const bool isDepthStencil = isDepthStencilFormat(resourceDesc.imageFormat);
1449 m_requiredQueueFlags = (isDepthStencil || m_type == TYPE_BLIT ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT);
1450
1451 // Don't blit depth/stencil images.
1452 DE_ASSERT(m_type != TYPE_BLIT || !isDepthStencil);
1453 }
1454
1455 deUint32 getInResourceUsageFlags (void) const
1456 {
1457 return VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1458 }
1459
1460 deUint32 getOutResourceUsageFlags (void) const
1461 {
1462 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1463 }
1464
1465 VkQueueFlags getQueueFlags (const OperationContext& context) const
1466 {
1467 DE_UNREF(context);
1468 return m_requiredQueueFlags;
1469 }
1470
1471 de::MovePtr<Operation> build (OperationContext&, Resource&) const
1472 {
1473 DE_ASSERT(0);
1474 return de::MovePtr<Operation>();
1475 }
1476
1477 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
1478 {
1479 if (m_type == TYPE_COPY)
1480 return de::MovePtr<Operation>(new CopyCopyImplementation(context, inResource, outResource));
1481 else
1482 return de::MovePtr<Operation>(new BlitCopyImplementation(context, inResource, outResource));
1483 }
1484
1485 private:
1486 const Type m_type;
1487 VkQueueFlags m_requiredQueueFlags;
1488 };
1489
1490 } // CopyBlitImage ns
1491
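//! Operations that access resources from shaders: UBO/SSBO reads/writes and storage image loads/stores in graphics or compute pipelines.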
1492 namespace ShaderAccess
1493 {
1494
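//! Selects how compute work is recorded: a direct vkCmdDispatch or vkCmdDispatchIndirect.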
1495 enum DispatchCall
1496 {
1497 DISPATCH_CALL_DISPATCH,
1498 DISPATCH_CALL_DISPATCH_INDIRECT,
1499 };
1500
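//! Graphics pipeline that draws the VertexGrid into a small offscreen color attachment so the shader stage under test performs the tested resource access.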
1501 class GraphicsPipeline : public Pipeline
1502 {
1503 public:
1504 GraphicsPipeline (OperationContext& context, const VkShaderStageFlagBits stage, const std::string& shaderPrefix, const VkDescriptorSetLayout descriptorSetLayout)
1505 : m_vertices (context)
1506 {
1507 const DeviceInterface& vk = context.getDeviceInterface();
1508 const VkDevice device = context.getDevice();
1509 Allocator& allocator = context.getAllocator();
1510 const VkShaderStageFlags requiredStages = getRequiredStages(stage);
1511
1512 // Color attachment
1513
1514 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
1515 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
1516 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
1517 m_colorAttachmentImage = de::MovePtr<Image>(new Image(vk, device, allocator,
1518 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),
1519 MemoryRequirement::Any));
1520
1521 // Pipeline
1522
1523 m_colorAttachmentView = makeImageView (vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorImageSubresourceRange);
1524 m_renderPass = makeRenderPass (vk, device, m_colorFormat);
1525 m_framebuffer = makeFramebuffer (vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width, m_colorImageExtent.height);
1526 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
1527
1528 GraphicsPipelineBuilder pipelineBuilder;
1529 pipelineBuilder
1530 .setRenderSize (tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
1531 .setVertexInputSingleAttribute (m_vertices.getVertexFormat(), m_vertices.getVertexStride())
1532 .setShader (vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get(shaderPrefix + "vert"), DE_NULL)
1533 .setShader (vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get(shaderPrefix + "frag"), DE_NULL);
1534
1535 if (requiredStages & (VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT | VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT))
1536 pipelineBuilder
1537 .setPatchControlPoints (m_vertices.getNumVertices())
1538 .setShader (vk, device, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT, context.getBinaryCollection().get(shaderPrefix + "tesc"), DE_NULL)
1539 .setShader (vk, device, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT, context.getBinaryCollection().get(shaderPrefix + "tese"), DE_NULL);
1540
1541 if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
1542 pipelineBuilder
1543 .setShader (vk, device, VK_SHADER_STAGE_GEOMETRY_BIT, context.getBinaryCollection().get(shaderPrefix + "geom"), DE_NULL);
1544
1545 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData());
1546 }
1547
1548 void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1549 {
1550 const DeviceInterface& vk = context.getDeviceInterface();
1551 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(context.getSynchronizationType(), vk, DE_FALSE);
1552
1553 // Change color attachment image layout
1554 {
1555 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1556 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1557 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1558 VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, // VkPipelineStageFlags2KHR dstStageMask
1559 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1560 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1561 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
1562 **m_colorAttachmentImage, // VkImage image
1563 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
1564 );
1565 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1566 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1567 }
1568
1569 {
1570 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
1571 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
1572
1573 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
1574 }
1575
1576 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
1577 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
1578 {
1579 const VkDeviceSize vertexBufferOffset = 0ull;
1580 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
1581 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
1582 }
1583
1584 vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
1585 endRenderPass(vk, cmdBuffer);
1586 }
1587
1588 private:
1589 const VertexGrid m_vertices;
1590 VkFormat m_colorFormat;
1591 de::MovePtr<Image> m_colorAttachmentImage;
1592 Move<VkImageView> m_colorAttachmentView;
1593 VkExtent3D m_colorImageExtent;
1594 VkImageSubresourceRange m_colorImageSubresourceRange;
1595 Move<VkRenderPass> m_renderPass;
1596 Move<VkFramebuffer> m_framebuffer;
1597 Move<VkPipelineLayout> m_pipelineLayout;
1598 Move<VkPipeline> m_pipeline;
1599 };
1600
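//! Compute pipeline that dispatches a single workgroup, either directly or through an indirect buffer.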
1601 class ComputePipeline : public Pipeline
1602 {
1603 public:
1604 ComputePipeline (OperationContext& context, const DispatchCall dispatchCall, const std::string& shaderPrefix, const VkDescriptorSetLayout descriptorSetLayout)
1605 : m_dispatchCall (dispatchCall)
1606 {
1607 const DeviceInterface& vk = context.getDeviceInterface();
1608 const VkDevice device = context.getDevice();
1609 Allocator& allocator = context.getAllocator();
1610
1611 if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1612 {
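			// Prepare a host-visible indirect buffer holding a single 1x1x1 dispatch.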
1613 m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
1614 makeBufferCreateInfo(sizeof(VkDispatchIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT), MemoryRequirement::HostVisible));
1615
1616 const Allocation& alloc = m_indirectBuffer->getAllocation();
1617 VkDispatchIndirectCommand* const pIndirectCommand = static_cast<VkDispatchIndirectCommand*>(alloc.getHostPtr());
1618
1619 pIndirectCommand->x = 1u;
1620 pIndirectCommand->y = 1u;
1621 pIndirectCommand->z = 1u;
1622
1623 flushAlloc(vk, device, alloc);
1624 }
1625
1626 const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
1627
1628 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
1629 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL, context.getPipelineCacheData());
1630 }
1631
1632 void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
1633 {
1634 const DeviceInterface& vk = context.getDeviceInterface();
1635
1636 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
1637 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
1638
1639 if (m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT)
1640 vk.cmdDispatchIndirect(cmdBuffer, **m_indirectBuffer, 0u);
1641 else
1642 vk.cmdDispatch(cmdBuffer, 1u, 1u, 1u);
1643 }
1644
1645 private:
1646 const DispatchCall m_dispatchCall;
1647 de::MovePtr<Buffer> m_indirectBuffer;
1648 Move<VkPipelineLayout> m_pipelineLayout;
1649 Move<VkPipeline> m_pipeline;
1650 };
1651
1652 //! Read/write operation on a UBO/SSBO in graphics/compute pipeline.
1653 class BufferImplementation : public Operation
1654 {
1655 public:
1656 BufferImplementation (OperationContext& context,
1657 Resource& resource,
1658 const VkShaderStageFlagBits stage,
1659 const BufferType bufferType,
1660 const std::string& shaderPrefix,
1661 const AccessMode mode,
1662 const PipelineType pipelineType,
1663 const DispatchCall dispatchCall)
1664 : m_context (context)
1665 , m_resource (resource)
1666 , m_stage (stage)
1667 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
1668 , m_bufferType (bufferType)
1669 , m_mode (mode)
1670 , m_dispatchCall (dispatchCall)
1671 {
1672 requireFeaturesForSSBOAccess (m_context, m_stage);
1673
1674 const DeviceInterface& vk = m_context.getDeviceInterface();
1675 const VkDevice device = m_context.getDevice();
1676 Allocator& allocator = m_context.getAllocator();
1677
1678 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
1679 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible));
1680
1681 // Init host buffer data
1682 {
1683 const Allocation& alloc = m_hostBuffer->getAllocation();
1684 if (m_mode == ACCESS_MODE_READ)
1685 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_resource.getBuffer().size));
1686 else
1687 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
1688 flushAlloc(vk, device, alloc);
1689 }
1690
1691 // Prepare descriptors
1692 {
1693 const VkDescriptorType bufferDescriptorType = (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
1694
1695 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
1696 .addSingleBinding(bufferDescriptorType, m_stage)
1697 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
1698 .build(vk, device);
1699
1700 m_descriptorPool = DescriptorPoolBuilder()
1701 .addType(bufferDescriptorType)
1702 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
1703 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
1704
1705 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
1706
1707 const VkDescriptorBufferInfo bufferInfo = makeDescriptorBufferInfo(m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size);
1708 const VkDescriptorBufferInfo hostBufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_resource.getBuffer().size);
1709
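			// Binding 0 is the shader input and binding 1 the shader output: in READ mode the tested buffer is copied into the host buffer, in WRITE mode the host buffer is copied into the tested buffer.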
1710 if (m_mode == ACCESS_MODE_READ)
1711 {
1712 DescriptorSetUpdateBuilder()
1713 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), bufferDescriptorType, &bufferInfo)
1714 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1715 .update(vk, device);
1716 }
1717 else
1718 {
1719 DescriptorSetUpdateBuilder()
1720 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
1721 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo)
1722 .update(vk, device);
1723 }
1724 }
1725
1726 // Create pipeline
1727 m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
1728 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
1729 }
1730
1731 void recordCommands (const VkCommandBuffer cmdBuffer)
1732 {
1733 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
1734
1735 // Post draw/dispatch commands
1736
1737 if (m_mode == ACCESS_MODE_READ)
1738 {
1739 const DeviceInterface& vk = m_context.getDeviceInterface();
1740 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
1741
1742 // Insert a barrier so data written by the shader is available to the host
1743 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
1744 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
1745 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1746 VK_PIPELINE_STAGE_HOST_BIT, // VkPipelineStageFlags2KHR dstStageMask
1747 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1748 **m_hostBuffer, // VkBuffer buffer
1749 0u, // VkDeviceSize offset
1750 m_resource.getBuffer().size // VkDeviceSize size
1751 );
1752 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
1753 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1754 }
1755 }
1756
1757 SyncInfo getInSyncInfo (void) const
1758 {
1759 const VkAccessFlags2KHR accessFlags = (m_mode == ACCESS_MODE_READ ? (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_ACCESS_2_UNIFORM_READ_BIT_KHR
1760 : VK_ACCESS_2_SHADER_READ_BIT_KHR)
1761 : VK_ACCESS_2_SHADER_WRITE_BIT_KHR);
1762 const SyncInfo syncInfo =
1763 {
1764 m_pipelineStage, // VkPipelineStageFlags stageMask;
1765 accessFlags, // VkAccessFlags accessMask;
1766 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
1767 };
1768 return syncInfo;
1769 }
1770
1771 SyncInfo getOutSyncInfo (void) const
1772 {
1773 const VkAccessFlags accessFlags = m_mode == ACCESS_MODE_WRITE ? VK_ACCESS_2_SHADER_WRITE_BIT_KHR : 0;
1774 const SyncInfo syncInfo =
1775 {
1776 m_pipelineStage, // VkPipelineStageFlags stageMask;
1777 accessFlags, // VkAccessFlags accessMask;
1778 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
1779 };
1780 return syncInfo;
1781 }
1782
1783 Data getData (void) const
1784 {
1785 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
1786 }
1787
1788 void setData (const Data& data)
1789 {
1790 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
1791 setHostBufferData(m_context, *m_hostBuffer, data);
1792 }
1793
1794 private:
1795 OperationContext& m_context;
1796 Resource& m_resource;
1797 const VkShaderStageFlagBits m_stage;
1798 const VkPipelineStageFlags m_pipelineStage;
1799 const BufferType m_bufferType;
1800 const AccessMode m_mode;
1801 const DispatchCall m_dispatchCall;
1802 de::MovePtr<Buffer> m_hostBuffer;
1803 Move<VkDescriptorPool> m_descriptorPool;
1804 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
1805 Move<VkDescriptorSet> m_descriptorSet;
1806 de::MovePtr<Pipeline> m_pipeline;
1807 };
1808
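//! Read/write operation on a storage image in a graphics or compute pipeline. A helper image and a host-visible buffer move data in and out of the shader.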
1809 class ImageImplementation : public Operation
1810 {
1811 public:
1812 ImageImplementation (OperationContext& context,
1813 Resource& resource,
1814 const VkShaderStageFlagBits stage,
1815 const std::string& shaderPrefix,
1816 const AccessMode mode,
1817 const PipelineType pipelineType,
1818 const DispatchCall dispatchCall)
1819 : m_context (context)
1820 , m_resource (resource)
1821 , m_stage (stage)
1822 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
1823 , m_mode (mode)
1824 , m_dispatchCall (dispatchCall)
1825 , m_hostBufferSizeBytes (getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
1826 {
1827 const DeviceInterface& vk = m_context.getDeviceInterface();
1828 const InstanceInterface& vki = m_context.getInstanceInterface();
1829 const VkDevice device = m_context.getDevice();
1830 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
1831 Allocator& allocator = m_context.getAllocator();
1832
1833 // Image stores are always required, in either access mode.
1834 requireFeaturesForSSBOAccess(m_context, m_stage);
1835
1836 // Some storage image formats may not be supported
1837 requireStorageImageSupport(vki, physDevice, m_resource.getImage().format);
1838
1839 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
1840 vk, device, allocator, makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT),
1841 MemoryRequirement::HostVisible));
1842
1843 // Init host buffer data
1844 {
1845 const Allocation& alloc = m_hostBuffer->getAllocation();
1846 if (m_mode == ACCESS_MODE_READ)
1847 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
1848 else
1849 fillPattern(alloc.getHostPtr(), m_hostBufferSizeBytes);
1850 flushAlloc(vk, device, alloc);
1851 }
1852
1853 // Image resources
1854 {
1855 m_image = de::MovePtr<Image>(new Image(vk, device, allocator,
1856 makeImageCreateInfo(m_resource.getImage().imageType, m_resource.getImage().extent, m_resource.getImage().format,
1857 VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_STORAGE_BIT),
1858 MemoryRequirement::Any));
1859
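			// In READ mode the tested image is the shader's source and the helper image its destination; in WRITE mode the roles are swapped.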
1860 if (m_mode == ACCESS_MODE_READ)
1861 {
1862 m_srcImage = &m_resource.getImage().handle;
1863 m_dstImage = &(**m_image);
1864 }
1865 else
1866 {
1867 m_srcImage = &(**m_image);
1868 m_dstImage = &m_resource.getImage().handle;
1869 }
1870
1871 const VkImageViewType viewType = getImageViewType(m_resource.getImage().imageType);
1872
1873 m_srcImageView = makeImageView(vk, device, *m_srcImage, viewType, m_resource.getImage().format, m_resource.getImage().subresourceRange);
1874 m_dstImageView = makeImageView(vk, device, *m_dstImage, viewType, m_resource.getImage().format, m_resource.getImage().subresourceRange);
1875 }
1876
1877 // Prepare descriptors
1878 {
1879 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
1880 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
1881 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
1882 .build(vk, device);
1883
1884 m_descriptorPool = DescriptorPoolBuilder()
1885 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1886 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
1887 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
1888
1889 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
1890
1891 const VkDescriptorImageInfo srcImageInfo = makeDescriptorImageInfo(DE_NULL, *m_srcImageView, VK_IMAGE_LAYOUT_GENERAL);
1892 const VkDescriptorImageInfo dstImageInfo = makeDescriptorImageInfo(DE_NULL, *m_dstImageView, VK_IMAGE_LAYOUT_GENERAL);
1893
1894 DescriptorSetUpdateBuilder()
1895 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &srcImageInfo)
1896 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &dstImageInfo)
1897 .update(vk, device);
1898 }
1899
1900 // Create pipeline
1901 m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
1902 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
1903 }
1904
1905 void recordCommands (const VkCommandBuffer cmdBuffer)
1906 {
1907 const DeviceInterface& vk = m_context.getDeviceInterface();
1908 const VkBufferImageCopy bufferCopyRegion = makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
1909 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
1910
1911 // Destination image layout
1912 {
1913 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1914 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1915 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1916 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
1917 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1918 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1919 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
1920 *m_dstImage, // VkImage image
1921 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1922 );
1923 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1924 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1925 }
1926
1927 // In write mode, source image must be filled with data.
1928 if (m_mode == ACCESS_MODE_WRITE)
1929 {
1930 // Layout for transfer
1931 {
1932 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1933 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
1934 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
1935 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1936 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1937 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
1938 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
1939 *m_srcImage, // VkImage image
1940 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1941 );
1942 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1943 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1944 }
1945
1946 // Host buffer -> Src image
1947 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, *m_srcImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &bufferCopyRegion);
1948
1949 // Layout for shader reading
1950 {
1951 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1952 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR srcStageMask
1953 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1954 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
1955 VK_ACCESS_2_SHADER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1956 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
1957 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
1958 *m_srcImage, // VkImage image
1959 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1960 );
1961 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1962 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1963 }
1964 }
1965
1966 // Execute shaders
1967
1968 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
1969
1970 // Post draw/dispatch commands
1971
1972 if (m_mode == ACCESS_MODE_READ)
1973 {
1974 // Layout for transfer
1975 {
1976 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
1977 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
1978 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1979 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR dstStageMask
1980 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
1981 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout oldLayout
1982 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
1983 *m_dstImage, // VkImage image
1984 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
1985 );
1986 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
1987 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
1988 }
1989
1990 // Dst image -> Host buffer
1991 vk.cmdCopyImageToBuffer(cmdBuffer, *m_dstImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &bufferCopyRegion);
1992
1993 // Insert a barrier so data written by the shader is available to the host
1994 {
1995 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
1996 VK_PIPELINE_STAGE_TRANSFER_BIT, // VkPipelineStageFlags2KHR srcStageMask
1997 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
1998 VK_PIPELINE_STAGE_HOST_BIT, // VkPipelineStageFlags2KHR dstStageMask
1999 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2000 **m_hostBuffer, // VkBuffer buffer
2001 0u, // VkDeviceSize offset
2002 m_hostBufferSizeBytes // VkDeviceSize size
2003 );
2004 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
2005 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2006 }
2007 }
2008 }
2009
2010 SyncInfo getInSyncInfo (void) const
2011 {
2012 const VkAccessFlags accessFlags = (m_mode == ACCESS_MODE_READ ? VK_ACCESS_2_SHADER_READ_BIT_KHR : 0);
2013 const SyncInfo syncInfo =
2014 {
2015 m_pipelineStage, // VkPipelineStageFlags stageMask;
2016 accessFlags, // VkAccessFlags accessMask;
2017 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2018 };
2019 return syncInfo;
2020 }
2021
2022 SyncInfo getOutSyncInfo (void) const
2023 {
2024 const VkAccessFlags accessFlags = (m_mode == ACCESS_MODE_WRITE ? VK_ACCESS_2_SHADER_WRITE_BIT_KHR : 0);
2025 const SyncInfo syncInfo =
2026 {
2027 m_pipelineStage, // VkPipelineStageFlags stageMask;
2028 accessFlags, // VkAccessFlags accessMask;
2029 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2030 };
2031 return syncInfo;
2032 }
2033
2034 Data getData (void) const
2035 {
2036 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
2037 }
2038
2039 void setData (const Data& data)
2040 {
2041 DE_ASSERT(m_mode == ACCESS_MODE_WRITE);
2042 setHostBufferData(m_context, *m_hostBuffer, data);
2043 }
2044
2045 private:
2046 OperationContext& m_context;
2047 Resource& m_resource;
2048 const VkShaderStageFlagBits m_stage;
2049 const VkPipelineStageFlags m_pipelineStage;
2050 const AccessMode m_mode;
2051 const DispatchCall m_dispatchCall;
2052 const VkDeviceSize m_hostBufferSizeBytes;
2053 de::MovePtr<Buffer> m_hostBuffer;
2054 de::MovePtr<Image> m_image; //! Additional image used as src or dst depending on operation mode.
2055 const VkImage* m_srcImage;
2056 const VkImage* m_dstImage;
2057 Move<VkImageView> m_srcImageView;
2058 Move<VkImageView> m_dstImageView;
2059 Move<VkDescriptorPool> m_descriptorPool;
2060 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2061 Move<VkDescriptorSet> m_descriptorSet;
2062 de::MovePtr<Pipeline> m_pipeline;
2063 };
2064
2065 //! Create generic passthrough shaders with bits of custom code inserted in a specific shader stage.
2066 void initPassthroughPrograms (SourceCollections& programCollection,
2067 const std::string& shaderPrefix,
2068 const std::string& declCode,
2069 const std::string& mainCode,
2070 const VkShaderStageFlagBits stage)
2071 {
2072 const VkShaderStageFlags requiredStages = getRequiredStages(stage);
2073
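	// The custom declarations and main body are injected only into the stage under test; all other required stages receive plain passthrough code.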
2074 if (requiredStages & VK_SHADER_STAGE_VERTEX_BIT)
2075 {
2076 std::ostringstream src;
2077 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2078 << "\n"
2079 << "layout(location = 0) in vec4 v_in_position;\n"
2080 << "\n"
2081 << "out " << s_perVertexBlock << ";\n"
2082 << "\n"
2083 << (stage & VK_SHADER_STAGE_VERTEX_BIT ? declCode + "\n" : "")
2084 << "void main (void)\n"
2085 << "{\n"
2086 << " gl_Position = v_in_position;\n"
2087 << (stage & VK_SHADER_STAGE_VERTEX_BIT ? mainCode : "")
2088 << "}\n";
2089
2090 if (!programCollection.glslSources.contains(shaderPrefix + "vert"))
2091 programCollection.glslSources.add(shaderPrefix + "vert") << glu::VertexSource(src.str());
2092 }
2093
2094 if (requiredStages & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT)
2095 {
2096 std::ostringstream src;
2097 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2098 << "\n"
2099 << "layout(vertices = 3) out;\n"
2100 << "\n"
2101 << "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
2102 << "\n"
2103 << "out " << s_perVertexBlock << " gl_out[];\n"
2104 << "\n"
2105 << (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? declCode + "\n" : "")
2106 << "void main (void)\n"
2107 << "{\n"
2108 << " gl_TessLevelInner[0] = 1.0;\n"
2109 << " gl_TessLevelInner[1] = 1.0;\n"
2110 << "\n"
2111 << " gl_TessLevelOuter[0] = 1.0;\n"
2112 << " gl_TessLevelOuter[1] = 1.0;\n"
2113 << " gl_TessLevelOuter[2] = 1.0;\n"
2114 << " gl_TessLevelOuter[3] = 1.0;\n"
2115 << "\n"
2116 << " gl_out[gl_InvocationID].gl_Position = gl_in[gl_InvocationID].gl_Position;\n"
2117 << (stage & VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT ? "\n" + mainCode : "")
2118 << "}\n";
2119
2120 if (!programCollection.glslSources.contains(shaderPrefix + "tesc"))
2121 programCollection.glslSources.add(shaderPrefix + "tesc") << glu::TessellationControlSource(src.str());
2122 }
2123
2124 if (requiredStages & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT)
2125 {
2126 std::ostringstream src;
2127 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2128 << "\n"
2129 << "layout(triangles, equal_spacing, ccw) in;\n"
2130 << "\n"
2131 << "in " << s_perVertexBlock << " gl_in[gl_MaxPatchVertices];\n"
2132 << "\n"
2133 << "out " << s_perVertexBlock << ";\n"
2134 << "\n"
2135 << (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? declCode + "\n" : "")
2136 << "void main (void)\n"
2137 << "{\n"
2138 << " vec3 px = gl_TessCoord.x * gl_in[0].gl_Position.xyz;\n"
2139 << " vec3 py = gl_TessCoord.y * gl_in[1].gl_Position.xyz;\n"
2140 << " vec3 pz = gl_TessCoord.z * gl_in[2].gl_Position.xyz;\n"
2141 << " gl_Position = vec4(px + py + pz, 1.0);\n"
2142 << (stage & VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT ? mainCode : "")
2143 << "}\n";
2144
2145 if (!programCollection.glslSources.contains(shaderPrefix + "tese"))
2146 programCollection.glslSources.add(shaderPrefix + "tese") << glu::TessellationEvaluationSource(src.str());
2147 }
2148
2149 if (requiredStages & VK_SHADER_STAGE_GEOMETRY_BIT)
2150 {
2151 std::ostringstream src;
2152 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2153 << "\n"
2154 << "layout(triangles) in;\n"
2155 << "layout(triangle_strip, max_vertices = 3) out;\n"
2156 << "\n"
2157 << "in " << s_perVertexBlock << " gl_in[];\n"
2158 << "\n"
2159 << "out " << s_perVertexBlock << ";\n"
2160 << "\n"
2161 << (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? declCode + "\n" : "")
2162 << "void main (void)\n"
2163 << "{\n"
2164 << " gl_Position = gl_in[0].gl_Position;\n"
2165 << " EmitVertex();\n"
2166 << "\n"
2167 << " gl_Position = gl_in[1].gl_Position;\n"
2168 << " EmitVertex();\n"
2169 << "\n"
2170 << " gl_Position = gl_in[2].gl_Position;\n"
2171 << " EmitVertex();\n"
2172 << (stage & VK_SHADER_STAGE_GEOMETRY_BIT ? "\n" + mainCode : "")
2173 << "}\n";
2174
2175 if (!programCollection.glslSources.contains(shaderPrefix + "geom"))
2176 programCollection.glslSources.add(shaderPrefix + "geom") << glu::GeometrySource(src.str());
2177 }
2178
2179 if (requiredStages & VK_SHADER_STAGE_FRAGMENT_BIT)
2180 {
2181 std::ostringstream src;
2182 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2183 << "\n"
2184 << "layout(location = 0) out vec4 o_color;\n"
2185 << "\n"
2186 << (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? declCode + "\n" : "")
2187 << "void main (void)\n"
2188 << "{\n"
2189 << " o_color = vec4(1.0);\n"
2190 << (stage & VK_SHADER_STAGE_FRAGMENT_BIT ? "\n" + mainCode : "")
2191 << "}\n";
2192
2193 if (!programCollection.glslSources.contains(shaderPrefix + "frag"))
2194 programCollection.glslSources.add(shaderPrefix + "frag") << glu::FragmentSource(src.str());
2195 }
2196
2197 if (requiredStages & VK_SHADER_STAGE_COMPUTE_BIT)
2198 {
2199 std::ostringstream src;
2200 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
2201 << "\n"
2202 << "layout(local_size_x = 1) in;\n"
2203 << "\n"
2204 << (stage & VK_SHADER_STAGE_COMPUTE_BIT ? declCode + "\n" : "")
2205 << "void main (void)\n"
2206 << "{\n"
2207 << (stage & VK_SHADER_STAGE_COMPUTE_BIT ? mainCode : "")
2208 << "}\n";
2209
2210 if (!programCollection.glslSources.contains(shaderPrefix + "comp"))
2211 programCollection.glslSources.add(shaderPrefix + "comp") << glu::ComputeSource(src.str());
2212 }
2213 }
2214
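//! Creates shader-based UBO/SSBO read or write operations on a single buffer resource.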
2215 class BufferSupport : public OperationSupport
2216 {
2217 public:
2218 BufferSupport (const ResourceDescription& resourceDesc,
2219 const BufferType bufferType,
2220 const AccessMode mode,
2221 const VkShaderStageFlagBits stage,
2222 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2223 : m_resourceDesc (resourceDesc)
2224 , m_bufferType (bufferType)
2225 , m_mode (mode)
2226 , m_stage (stage)
2227 , m_shaderPrefix (std::string(m_mode == ACCESS_MODE_READ ? "read_" : "write_") + (m_bufferType == BUFFER_TYPE_UNIFORM ? "ubo_" : "ssbo_"))
2228 , m_dispatchCall (dispatchCall)
2229 {
2230 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
2231 DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_STORAGE);
2232 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
2233 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_bufferType == BUFFER_TYPE_STORAGE);
2234 DE_ASSERT(m_bufferType != BUFFER_TYPE_UNIFORM || m_resourceDesc.size.x() <= MAX_UBO_RANGE);
2235 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2236
2237 assertValidShaderStage(m_stage);
2238 }
2239
2240 void initPrograms (SourceCollections& programCollection) const
2241 {
2242 DE_ASSERT((m_resourceDesc.size.x() % sizeof(tcu::UVec4)) == 0);
2243
2244 const std::string bufferTypeStr = (m_bufferType == BUFFER_TYPE_UNIFORM ? "uniform" : "buffer");
2245 const int numVecElements = static_cast<int>(m_resourceDesc.size.x() / sizeof(tcu::UVec4)); // uvec4 elements keep the std140 array 16-byte aligned
2246
2247 std::ostringstream declSrc;
2248 declSrc << "layout(set = 0, binding = 0, std140) readonly " << bufferTypeStr << " Input {\n"
2249 << " uvec4 data[" << numVecElements << "];\n"
2250 << "} b_in;\n"
2251 << "\n"
2252 << "layout(set = 0, binding = 1, std140) writeonly buffer Output {\n"
2253 << " uvec4 data[" << numVecElements << "];\n"
2254 << "} b_out;\n";
2255
2256 std::ostringstream copySrc;
2257 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2258 << " b_out.data[i] = b_in.data[i];\n"
2259 << " }\n";
2260
2261 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), copySrc.str(), m_stage);
2262 }
2263
2264 deUint32 getInResourceUsageFlags (void) const
2265 {
2266 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2267 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : 0;
2268 else
2269 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : 0;
2270 }
2271
2272 deUint32 getOutResourceUsageFlags (void) const
2273 {
2274 if (m_bufferType == BUFFER_TYPE_UNIFORM)
2275 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : 0;
2276 else
2277 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : 0;
2278 }
2279
2280 VkQueueFlags getQueueFlags (const OperationContext& context) const
2281 {
2282 DE_UNREF(context);
2283 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2284 }
2285
2286 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
2287 {
2288 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2289 return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType, m_shaderPrefix, m_mode, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2290 else
2291 return de::MovePtr<Operation>(new BufferImplementation(context, resource, m_stage, m_bufferType, m_shaderPrefix, m_mode, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2292 }
2293
2294 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
2295 {
2296 DE_ASSERT(0);
2297 return de::MovePtr<Operation>();
2298 }
2299
2300 private:
2301 const ResourceDescription m_resourceDesc;
2302 const BufferType m_bufferType;
2303 const AccessMode m_mode;
2304 const VkShaderStageFlagBits m_stage;
2305 const std::string m_shaderPrefix;
2306 const DispatchCall m_dispatchCall;
2307 };
2308
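//! Creates shader-based storage image read or write operations on a single image resource.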
2309 class ImageSupport : public OperationSupport
2310 {
2311 public:
2312 ImageSupport (const ResourceDescription& resourceDesc,
2313 const AccessMode mode,
2314 const VkShaderStageFlagBits stage,
2315 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2316 : m_resourceDesc (resourceDesc)
2317 , m_mode (mode)
2318 , m_stage (stage)
2319 , m_shaderPrefix (m_mode == ACCESS_MODE_READ ? "read_image_" : "write_image_")
2320 , m_dispatchCall (dispatchCall)
2321 {
2322 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
2323 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
2324 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2325
2326 assertValidShaderStage(m_stage);
2327 }
2328
2329 void initPrograms (SourceCollections& programCollection) const
2330 {
2331 const std::string imageFormat = getShaderImageFormatQualifier(m_resourceDesc.imageFormat);
2332 const std::string imageType = getShaderImageType(m_resourceDesc.imageFormat, m_resourceDesc.imageType);
2333
2334 std::ostringstream declSrc;
2335 declSrc << "layout(set = 0, binding = 0, " << imageFormat << ") readonly uniform " << imageType << " srcImg;\n"
2336 << "layout(set = 0, binding = 1, " << imageFormat << ") writeonly uniform " << imageType << " dstImg;\n";
2337
2338 std::ostringstream mainSrc;
2339 if (m_resourceDesc.imageType == VK_IMAGE_TYPE_1D)
2340 mainSrc << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2341 << " imageStore(dstImg, x, imageLoad(srcImg, x));\n";
2342 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_2D)
2343 mainSrc << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2344 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2345 << " imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));\n";
2346 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_3D)
2347 mainSrc << " for (int z = 0; z < " << m_resourceDesc.size.z() << "; ++z)\n"
2348 << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2349 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2350 << " imageStore(dstImg, ivec3(x, y, z), imageLoad(srcImg, ivec3(x, y, z)));\n";
2351 else
2352 DE_ASSERT(0);
2353
2354 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), mainSrc.str(), m_stage);
2355 }
2356
2357 deUint32 getInResourceUsageFlags (void) const
2358 {
2359 return VK_IMAGE_USAGE_STORAGE_BIT;
2360 }
2361
2362 deUint32 getOutResourceUsageFlags (void) const
2363 {
2364 return VK_IMAGE_USAGE_STORAGE_BIT;
2365 }
2366
2367 VkQueueFlags getQueueFlags (const OperationContext& context) const
2368 {
2369 DE_UNREF(context);
2370 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2371 }
2372
2373 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
2374 {
2375 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2376 return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2377 else
2378 return de::MovePtr<Operation>(new ImageImplementation(context, resource, m_stage, m_shaderPrefix, m_mode, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2379 }
2380
2381 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
2382 {
2383 DE_ASSERT(0);
2384 return de::MovePtr<Operation>();
2385 }
2386
2387 private:
2388 const ResourceDescription m_resourceDesc;
2389 const AccessMode m_mode;
2390 const VkShaderStageFlagBits m_stage;
2391 const std::string m_shaderPrefix;
2392 const DispatchCall m_dispatchCall;
2393 };
2394
2395 //! Copy operation on a UBO/SSBO in graphics/compute pipeline.
2396 class BufferCopyImplementation : public Operation
2397 {
2398 public:
2399 BufferCopyImplementation (OperationContext& context,
2400 Resource& inResource,
2401 Resource& outResource,
2402 const VkShaderStageFlagBits stage,
2403 const BufferType bufferType,
2404 const std::string& shaderPrefix,
2405 const PipelineType pipelineType,
2406 const DispatchCall dispatchCall)
2407 : m_context (context)
2408 , m_inResource (inResource)
2409 , m_outResource (outResource)
2410 , m_stage (stage)
2411 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2412 , m_bufferType (bufferType)
2413 , m_dispatchCall (dispatchCall)
2414 {
2415 requireFeaturesForSSBOAccess (m_context, m_stage);
2416
2417 const DeviceInterface& vk = m_context.getDeviceInterface();
2418 const VkDevice device = m_context.getDevice();
2419
2420 // Prepare descriptors
2421 {
2422 const VkDescriptorType bufferDescriptorType = (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
2423
2424 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2425 .addSingleBinding(bufferDescriptorType, m_stage)
2426 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
2427 .build(vk, device);
2428
2429 m_descriptorPool = DescriptorPoolBuilder()
2430 .addType(bufferDescriptorType)
2431 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
2432 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2433
2434 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2435
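			// Binding 0 is the copy source (in resource) and binding 1 the copy destination (out resource).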
2436 const VkDescriptorBufferInfo inBufferInfo = makeDescriptorBufferInfo(m_inResource.getBuffer().handle, m_inResource.getBuffer().offset, m_inResource.getBuffer().size);
2437 const VkDescriptorBufferInfo outBufferInfo = makeDescriptorBufferInfo(m_outResource.getBuffer().handle, m_outResource.getBuffer().offset, m_outResource.getBuffer().size);
2438
2439 DescriptorSetUpdateBuilder()
2440 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &inBufferInfo)
2441 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outBufferInfo)
2442 .update(vk, device);
2443 }
2444
2445 // Create pipeline
2446 m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
2447 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2448 }
2449
2450 void recordCommands (const VkCommandBuffer cmdBuffer)
2451 {
2452 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2453 }
2454
2455 SyncInfo getInSyncInfo (void) const
2456 {
2457 const SyncInfo syncInfo =
2458 {
2459 m_pipelineStage, // VkPipelineStageFlags stageMask;
2460 VK_ACCESS_2_SHADER_READ_BIT_KHR, // VkAccessFlags accessMask;
2461 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
2462 };
2463 return syncInfo;
2464 }
2465
2466 SyncInfo getOutSyncInfo (void) const
2467 {
2468 const SyncInfo syncInfo =
2469 {
2470 m_pipelineStage, // VkPipelineStageFlags stageMask;
2471 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
2472 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
2473 };
2474 return syncInfo;
2475 }
2476
2477 Data getData (void) const
2478 {
2479 Data data = { 0, DE_NULL };
2480 return data;
2481 }
2482
2483 void setData (const Data&)
2484 {
2485 DE_ASSERT(0);
2486 }
2487
2488 private:
2489 OperationContext& m_context;
2490 Resource& m_inResource;
2491 Resource& m_outResource;
2492 const VkShaderStageFlagBits m_stage;
2493 const VkPipelineStageFlags m_pipelineStage;
2494 const BufferType m_bufferType;
2495 const DispatchCall m_dispatchCall;
2496 Move<VkDescriptorPool> m_descriptorPool;
2497 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2498 Move<VkDescriptorSet> m_descriptorSet;
2499 de::MovePtr<Pipeline> m_pipeline;
2500 };
2501
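//! Creates shader-based buffer-to-buffer copy operations between two buffer resources.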
2502 class CopyBufferSupport : public OperationSupport
2503 {
2504 public:
2505 CopyBufferSupport (const ResourceDescription& resourceDesc,
2506 const BufferType bufferType,
2507 const VkShaderStageFlagBits stage,
2508 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2509 : m_resourceDesc (resourceDesc)
2510 , m_bufferType (bufferType)
2511 , m_stage (stage)
2512 , m_shaderPrefix (std::string("copy_") + getShaderStageName(stage) + (m_bufferType == BUFFER_TYPE_UNIFORM ? "_ubo_" : "_ssbo_"))
2513 , m_dispatchCall (dispatchCall)
2514 {
2515 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER);
2516 DE_ASSERT(m_bufferType == BUFFER_TYPE_UNIFORM || m_bufferType == BUFFER_TYPE_STORAGE);
2517 DE_ASSERT(m_bufferType != BUFFER_TYPE_UNIFORM || m_resourceDesc.size.x() <= MAX_UBO_RANGE);
2518 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2519
2520 assertValidShaderStage(m_stage);
2521 }
2522
2523 void initPrograms (SourceCollections& programCollection) const
2524 {
2525 DE_ASSERT((m_resourceDesc.size.x() % sizeof(tcu::UVec4)) == 0);
2526
2527 const std::string bufferTypeStr = (m_bufferType == BUFFER_TYPE_UNIFORM ? "uniform" : "buffer");
2528 const int numVecElements = static_cast<int>(m_resourceDesc.size.x() / sizeof(tcu::UVec4)); // uvec4 elements keep the std140 array 16-byte aligned
2529
2530 std::ostringstream declSrc;
2531 declSrc << "layout(set = 0, binding = 0, std140) readonly " << bufferTypeStr << " Input {\n"
2532 << " uvec4 data[" << numVecElements << "];\n"
2533 << "} b_in;\n"
2534 << "\n"
2535 << "layout(set = 0, binding = 1, std140) writeonly buffer Output {\n"
2536 << " uvec4 data[" << numVecElements << "];\n"
2537 << "} b_out;\n";
2538
2539 std::ostringstream copySrc;
2540 copySrc << " for (int i = 0; i < " << numVecElements << "; ++i) {\n"
2541 << " b_out.data[i] = b_in.data[i];\n"
2542 << " }\n";
2543
2544 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), copySrc.str(), m_stage);
2545 }
2546
2547 deUint32 getInResourceUsageFlags (void) const
2548 {
2549 return (m_bufferType == BUFFER_TYPE_UNIFORM ? VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
2550 }
2551
2552 deUint32 getOutResourceUsageFlags (void) const
2553 {
2554 return VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
2555 }
2556
2557 VkQueueFlags getQueueFlags (const OperationContext& context) const
2558 {
2559 DE_UNREF(context);
2560 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2561 }
2562
2563 de::MovePtr<Operation> build (OperationContext&, Resource&) const
2564 {
2565 DE_ASSERT(0);
2566 return de::MovePtr<Operation>();
2567 }
2568
2569 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
2570 {
2571 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2572 return de::MovePtr<Operation>(new BufferCopyImplementation(context, inResource, outResource, m_stage, m_bufferType, m_shaderPrefix, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2573 else
2574 return de::MovePtr<Operation>(new BufferCopyImplementation(context, inResource, outResource, m_stage, m_bufferType, m_shaderPrefix, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2575 }
2576
2577 private:
2578 const ResourceDescription m_resourceDesc;
2579 const BufferType m_bufferType;
2580 const VkShaderStageFlagBits m_stage;
2581 const std::string m_shaderPrefix;
2582 const DispatchCall m_dispatchCall;
2583 };
2584
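//! Image-to-image copy performed in a shader: the in resource is read with imageLoad and the out resource written with imageStore.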
2585 class CopyImageImplementation : public Operation
2586 {
2587 public:
2588 CopyImageImplementation (OperationContext& context,
2589 Resource& inResource,
2590 Resource& outResource,
2591 const VkShaderStageFlagBits stage,
2592 const std::string& shaderPrefix,
2593 const PipelineType pipelineType,
2594 const DispatchCall dispatchCall)
2595 : m_context (context)
2596 , m_inResource (inResource)
2597 , m_outResource (outResource)
2598 , m_stage (stage)
2599 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
2600 , m_dispatchCall (dispatchCall)
2601 {
2602 const DeviceInterface& vk = m_context.getDeviceInterface();
2603 const InstanceInterface& vki = m_context.getInstanceInterface();
2604 const VkDevice device = m_context.getDevice();
2605 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
2606
2607 // Image stores are always required, in either access mode.
2608 requireFeaturesForSSBOAccess(m_context, m_stage);
2609
2610 // Some storage image formats may not be supported
2611 requireStorageImageSupport(vki, physDevice, m_inResource.getImage().format);
2612
2613 // Image resources
2614 {
2615 const VkImageViewType viewType = getImageViewType(m_inResource.getImage().imageType);
2616
2617 m_srcImageView = makeImageView(vk, device, m_inResource.getImage().handle, viewType, m_inResource.getImage().format, m_inResource.getImage().subresourceRange);
2618 m_dstImageView = makeImageView(vk, device, m_outResource.getImage().handle, viewType, m_outResource.getImage().format, m_outResource.getImage().subresourceRange);
2619 }
2620
2621 // Prepare descriptors
2622 {
2623 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2624 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2625 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, m_stage)
2626 .build(vk, device);
2627
2628 m_descriptorPool = DescriptorPoolBuilder()
2629 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2630 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2631 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2632
2633 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2634
2635 const VkDescriptorImageInfo srcImageInfo = makeDescriptorImageInfo(DE_NULL, *m_srcImageView, VK_IMAGE_LAYOUT_GENERAL);
2636 const VkDescriptorImageInfo dstImageInfo = makeDescriptorImageInfo(DE_NULL, *m_dstImageView, VK_IMAGE_LAYOUT_GENERAL);
2637
2638 DescriptorSetUpdateBuilder()
2639 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &srcImageInfo)
2640 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &dstImageInfo)
2641 .update(vk, device);
2642 }
2643
2644 // Create pipeline
2645 m_pipeline = (pipelineType == PIPELINE_TYPE_GRAPHICS ? de::MovePtr<Pipeline>(new GraphicsPipeline(context, stage, shaderPrefix, *m_descriptorSetLayout))
2646 : de::MovePtr<Pipeline>(new ComputePipeline(context, m_dispatchCall, shaderPrefix, *m_descriptorSetLayout)));
2647 }
2648
2649 void recordCommands (const VkCommandBuffer cmdBuffer)
2650 {
2651 {
2652 const DeviceInterface& vk = m_context.getDeviceInterface();
2653 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
2654
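			// Transition the destination image to VK_IMAGE_LAYOUT_GENERAL so the shader can store to it.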
2655 const VkImageMemoryBarrier2KHR imageMemoryBarriers2 = makeImageMemoryBarrier2(
2656 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, // VkPipelineStageFlags2KHR srcStageMask
2657 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2658 m_pipelineStage, // VkPipelineStageFlags2KHR dstStageMask
2659 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2660 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2661 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2662 m_outResource.getImage().handle, // VkImage image
2663 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2664 );
2665 VkDependencyInfoKHR dependencyInfo
2666 {
2667 VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR, // VkStructureType sType
2668 DE_NULL, // const void* pNext
2669 VK_DEPENDENCY_BY_REGION_BIT, // VkDependencyFlags dependencyFlags
2670 0u, // deUint32 memoryBarrierCount
2671 DE_NULL, // const VkMemoryBarrier2KHR* pMemoryBarriers
2672 0u, // deUint32 bufferMemoryBarrierCount
2673 DE_NULL, // const VkBufferMemoryBarrier2KHR* pBufferMemoryBarriers
2674 1, // deUint32 imageMemoryBarrierCount
2675 &imageMemoryBarriers2 // const VkImageMemoryBarrier2KHR* pImageMemoryBarriers
2676 };
2677 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2678 }
2679
2680 // Execute shaders
2681 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
2682 }
2683
2684 SyncInfo getInSyncInfo (void) const
2685 {
2686 const SyncInfo syncInfo =
2687 {
2688 m_pipelineStage, // VkPipelineStageFlags stageMask;
2689 VK_ACCESS_2_SHADER_READ_BIT_KHR, // VkAccessFlags accessMask;
2690 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2691 };
2692 return syncInfo;
2693 }
2694
2695 SyncInfo getOutSyncInfo (void) const
2696 {
2697 const SyncInfo syncInfo =
2698 {
2699 m_pipelineStage, // VkPipelineStageFlags stageMask;
2700 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
2701 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2702 };
2703 return syncInfo;
2704 }
2705
2706 Data getData (void) const
2707 {
2708 Data data = { 0, DE_NULL };
2709 return data;
2710 }
2711
2712 void setData (const Data&)
2713 {
2714 DE_ASSERT(0);
2715 }
2716
2717 private:
2718 OperationContext& m_context;
2719 Resource& m_inResource;
2720 Resource& m_outResource;
2721 const VkShaderStageFlagBits m_stage;
2722 const VkPipelineStageFlags m_pipelineStage;
2723 const DispatchCall m_dispatchCall;
2724 Move<VkImageView> m_srcImageView;
2725 Move<VkImageView> m_dstImageView;
2726 Move<VkDescriptorPool> m_descriptorPool;
2727 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2728 Move<VkDescriptorSet> m_descriptorSet;
2729 de::MovePtr<Pipeline> m_pipeline;
2730 };
2731
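//! Shader-based image copy: reads the input image and writes the output image via imageLoad/imageStore in the selected shader stage.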
2732 class CopyImageSupport : public OperationSupport
2733 {
2734 public:
2735 CopyImageSupport (const ResourceDescription& resourceDesc,
2736 const VkShaderStageFlagBits stage,
2737 const DispatchCall dispatchCall = DISPATCH_CALL_DISPATCH)
2738 : m_resourceDesc (resourceDesc)
2739 , m_stage (stage)
2740 , m_shaderPrefix (std::string("copy_image_") + getShaderStageName(stage) + "_")
2741 , m_dispatchCall (dispatchCall)
2742 {
2743 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
2744 DE_ASSERT(m_dispatchCall == DISPATCH_CALL_DISPATCH || m_dispatchCall == DISPATCH_CALL_DISPATCH_INDIRECT);
2745
2746 assertValidShaderStage(m_stage);
2747 }
2748
2749 void initPrograms (SourceCollections& programCollection) const
2750 {
2751 const std::string imageFormat = getShaderImageFormatQualifier(m_resourceDesc.imageFormat);
2752 const std::string imageType = getShaderImageType(m_resourceDesc.imageFormat, m_resourceDesc.imageType);
2753
2754 std::ostringstream declSrc;
2755 declSrc << "layout(set = 0, binding = 0, " << imageFormat << ") readonly uniform " << imageType << " srcImg;\n"
2756 << "layout(set = 0, binding = 1, " << imageFormat << ") writeonly uniform " << imageType << " dstImg;\n";
2757
2758 std::ostringstream mainSrc;
2759 if (m_resourceDesc.imageType == VK_IMAGE_TYPE_1D)
2760 mainSrc << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2761 << " imageStore(dstImg, x, imageLoad(srcImg, x));\n";
2762 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_2D)
2763 mainSrc << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2764 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2765 << " imageStore(dstImg, ivec2(x, y), imageLoad(srcImg, ivec2(x, y)));\n";
2766 else if (m_resourceDesc.imageType == VK_IMAGE_TYPE_3D)
2767 mainSrc << " for (int z = 0; z < " << m_resourceDesc.size.z() << "; ++z)\n"
2768 << " for (int y = 0; y < " << m_resourceDesc.size.y() << "; ++y)\n"
2769 << " for (int x = 0; x < " << m_resourceDesc.size.x() << "; ++x)\n"
2770 << " imageStore(dstImg, ivec3(x, y, z), imageLoad(srcImg, ivec3(x, y, z)));\n";
2771 else
2772 DE_ASSERT(0);
2773
2774 initPassthroughPrograms(programCollection, m_shaderPrefix, declSrc.str(), mainSrc.str(), m_stage);
2775 }
2776
2777 deUint32 getInResourceUsageFlags (void) const
2778 {
2779 return VK_IMAGE_USAGE_STORAGE_BIT;
2780 }
2781
2782 deUint32 getOutResourceUsageFlags (void) const
2783 {
2784 return VK_IMAGE_USAGE_STORAGE_BIT;
2785 }
2786
2787 VkQueueFlags getQueueFlags (const OperationContext& context) const
2788 {
2789 DE_UNREF(context);
2790 return (m_stage == VK_SHADER_STAGE_COMPUTE_BIT ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
2791 }
2792
2793 de::MovePtr<Operation> build (OperationContext&, Resource&) const
2794 {
2795 DE_ASSERT(0);
2796 return de::MovePtr<Operation>();
2797 }
2798
2799 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
2800 {
2801 if (m_stage & VK_SHADER_STAGE_COMPUTE_BIT)
2802 return de::MovePtr<Operation>(new CopyImageImplementation(context, inResource, outResource, m_stage, m_shaderPrefix, PIPELINE_TYPE_COMPUTE, m_dispatchCall));
2803 else
2804 return de::MovePtr<Operation>(new CopyImageImplementation(context, inResource, outResource, m_stage, m_shaderPrefix, PIPELINE_TYPE_GRAPHICS, m_dispatchCall));
2805 }
2806
2807 private:
2808 const ResourceDescription m_resourceDesc;
2809 const VkShaderStageFlagBits m_stage;
2810 const std::string m_shaderPrefix;
2811 const DispatchCall m_dispatchCall;
2812 };
2813
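//! A write operation that fills a multisample storage image from a host-visible buffer in a compute shader.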
2814 class MSImageImplementation : public Operation
2815 {
2816 public:
2817 MSImageImplementation(OperationContext& context,
2818 Resource& resource)
2819 : m_context (context)
2820 , m_resource (resource)
2821 , m_hostBufferSizeBytes(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
2822 {
2823 const DeviceInterface& vk = m_context.getDeviceInterface();
2824 const InstanceInterface& vki = m_context.getInstanceInterface();
2825 const VkDevice device = m_context.getDevice();
2826 const VkPhysicalDevice physDevice = m_context.getPhysicalDevice();
2827 const VkPhysicalDeviceFeatures features = getPhysicalDeviceFeatures(vki, physDevice);
2828 Allocator& allocator = m_context.getAllocator();
2829
2830 requireStorageImageSupport(vki, physDevice, m_resource.getImage().format);
2831 if (!features.shaderStorageImageMultisample)
2832 TCU_THROW(NotSupportedError, "Using multisample images as storage is not supported");
2833
2834 VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_STORAGE_BUFFER_BIT);
2835 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, bufferCreateInfo, MemoryRequirement::HostVisible));
2836 const Allocation& alloc = m_hostBuffer->getAllocation();
2837 fillPattern(alloc.getHostPtr(), m_hostBufferSizeBytes);
2838 flushAlloc(vk, device, alloc);
2839
2840 const ImageResource& image = m_resource.getImage();
2841 const VkImageViewType viewType = getImageViewType(image.imageType);
2842 m_imageView = makeImageView(vk, device, image.handle, viewType, image.format, image.subresourceRange);
2843
2844 // Prepare descriptors
2845 {
2846 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
2847 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_COMPUTE_BIT)
2848 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
2849 .build(vk, device);
2850
2851 m_descriptorPool = DescriptorPoolBuilder()
2852 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
2853 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2854 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
2855
2856 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
2857
2858 const VkDescriptorBufferInfo bufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_hostBufferSizeBytes);
2859 const VkDescriptorImageInfo imageInfo = makeDescriptorImageInfo(DE_NULL, *m_imageView, VK_IMAGE_LAYOUT_GENERAL);
2860
2861 DescriptorSetUpdateBuilder()
2862 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &bufferInfo)
2863 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &imageInfo)
2864 .update(vk, device);
2865 }
2866
2867 // Create pipeline
2868 const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, context.getBinaryCollection().get("comp"), (VkShaderModuleCreateFlags)0));
2869 m_pipelineLayout = makePipelineLayout (vk, device, *m_descriptorSetLayout);
2870 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL, context.getPipelineCacheData());
2871 }
2872
2873 void recordCommands(const VkCommandBuffer cmdBuffer)
2874 {
2875 const DeviceInterface& vk = m_context.getDeviceInterface();
2876 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
2877
2878 // change image layout
2879 {
2880 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
2881 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
2882 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
2883 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
2884 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
2885 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
2886 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout newLayout
2887 m_resource.getImage().handle, // VkImage image
2888 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
2889 );
2890 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
2891 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
2892 }
2893
2894 // execute shader
2895 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
2896 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &*m_descriptorSet, 0u, DE_NULL);
2897 vk.cmdDispatch(cmdBuffer, m_resource.getImage().extent.width, m_resource.getImage().extent.height, 1u);
2898 }
2899
2900 SyncInfo getInSyncInfo(void) const
2901 {
2902 DE_ASSERT(false);
2903 return emptySyncInfo;
2904 }
2905
2906 SyncInfo getOutSyncInfo(void) const
2907 {
2908 return
2909 {
2910 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR, // VkPipelineStageFlags stageMask;
2911 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
2912 VK_IMAGE_LAYOUT_GENERAL, // VkImageLayout imageLayout;
2913 };
2914 }
2915
2916 Data getData(void) const
2917 {
2918 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
2919 }
2920
2921 void setData(const Data&)
2922 {
2923 DE_ASSERT(false);
2924 }
2925
2926 private:
2927 OperationContext& m_context;
2928 Resource& m_resource;
2929 Move<VkImageView> m_imageView;
2930
2931 const VkDeviceSize m_hostBufferSizeBytes;
2932 de::MovePtr<Buffer> m_hostBuffer;
2933
2934 Move<VkDescriptorPool> m_descriptorPool;
2935 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
2936 Move<VkDescriptorSet> m_descriptorSet;
2937 Move<VkPipelineLayout> m_pipelineLayout;
2938 Move<VkPipeline> m_pipeline;
2939 };
2940
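//! Support class for the multisample storage image write; requires a compute-capable queue.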
2941 class MSImageSupport : public OperationSupport
2942 {
2943 public:
2944 MSImageSupport(const ResourceDescription& resourceDesc)
2945 : m_resourceDesc (resourceDesc)
2946 {
2947 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
2948 }
2949
2950 void initPrograms (SourceCollections& programCollection) const
2951 {
2952 std::stringstream source;
2953 source <<
2954 "#version 440\n"
2955 "\n"
2956 "layout(local_size_x = 1) in;\n"
2957 "layout(set = 0, binding = 0, std430) readonly buffer Input {\n"
2958 " uint data[];\n"
2959 "} inData;\n"
2960 "layout(set = 0, binding = 1, r32ui) writeonly uniform uimage2DMS msImage;\n"
2961 "\n"
2962 "void main (void)\n"
2963 "{\n"
2964 " int gx = int(gl_GlobalInvocationID.x);\n"
2965 " int gy = int(gl_GlobalInvocationID.y);\n"
2966 " uint value = inData.data[gy * " << m_resourceDesc.size.x() << " + gx];\n"
2967 " for (int sampleNdx = 0; sampleNdx < " << m_resourceDesc.imageSamples << "; ++sampleNdx)\n"
2968 " imageStore(msImage, ivec2(gx, gy), sampleNdx, uvec4(value));\n"
2969 "}\n";
2970 programCollection.glslSources.add("comp") << glu::ComputeSource(source.str().c_str());
2971 }
2972
2973 deUint32 getInResourceUsageFlags (void) const
2974 {
2975 return 0;
2976 }
2977
2978 deUint32 getOutResourceUsageFlags (void) const
2979 {
2980 return VK_IMAGE_USAGE_STORAGE_BIT;
2981 }
2982
2983 VkQueueFlags getQueueFlags (const OperationContext&) const
2984 {
2985 return VK_QUEUE_COMPUTE_BIT;
2986 }
2987
2988 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
2989 {
2990 return de::MovePtr<Operation>(new MSImageImplementation(context, resource));
2991 }
2992
2993 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
2994 {
2995 DE_ASSERT(0);
2996 return de::MovePtr<Operation>();
2997 }
2998
2999 private:
3000 const ResourceDescription m_resourceDesc;
3001 };
3002
3003 } // ShaderAccess ns
3004
3005 namespace CopyBufferToImage
3006 {
3007
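//! A write operation that copies a host-filled staging buffer into the resource image with vkCmdCopyBufferToImage.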
3008 class WriteImplementation : public Operation
3009 {
3010 public:
3011 WriteImplementation (OperationContext& context, Resource& resource)
3012 : m_context (context)
3013 , m_resource (resource)
3014 , m_bufferSize (getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3015 {
3016 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
3017
3018 const DeviceInterface& vk = m_context.getDeviceInterface();
3019 const VkDevice device = m_context.getDevice();
3020 Allocator& allocator = m_context.getAllocator();
3021
3022 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3023 vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT), MemoryRequirement::HostVisible));
3024
3025 const Allocation& alloc = m_hostBuffer->getAllocation();
3026 fillPattern(alloc.getHostPtr(), m_bufferSize);
3027 flushAlloc(vk, device, alloc);
3028 }
3029
3030 void recordCommands (const VkCommandBuffer cmdBuffer)
3031 {
3032 const DeviceInterface& vk = m_context.getDeviceInterface();
3033 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
3034 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3035
3036 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3037 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3038 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3039 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3040 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3041 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3042 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3043 m_resource.getImage().handle, // VkImage image
3044 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3045 );
3046 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3047 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3048
3049 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3050 }
3051
3052 SyncInfo getInSyncInfo (void) const
3053 {
3054 return emptySyncInfo;
3055 }
3056
3057 SyncInfo getOutSyncInfo (void) const
3058 {
3059 const SyncInfo syncInfo =
3060 {
3061 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3062 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3063 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3064 };
3065 return syncInfo;
3066 }
3067
3068 Data getData (void) const
3069 {
3070 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
3071 }
3072
3073 void setData (const Data& data)
3074 {
3075 setHostBufferData(m_context, *m_hostBuffer, data);
3076 }
3077
3078 private:
3079 OperationContext& m_context;
3080 Resource& m_resource;
3081 de::MovePtr<Buffer> m_hostBuffer;
3082 const VkDeviceSize m_bufferSize;
3083 };
3084
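//! A read operation that copies the resource buffer into a temporary image and then back into a host-visible buffer.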
3085 class ReadImplementation : public Operation
3086 {
3087 public:
3088 ReadImplementation (OperationContext& context, Resource& resource)
3089 : m_context (context)
3090 , m_resource (resource)
3091 , m_subresourceRange (makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3092 , m_subresourceLayers (makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3093 {
3094 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
3095
3096 const DeviceInterface& vk = m_context.getDeviceInterface();
3097 const VkDevice device = m_context.getDevice();
3098 Allocator& allocator = m_context.getAllocator();
3099 const VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3100 const deUint32 pixelSize = tcu::getPixelSize(mapVkFormat(format));
3101
3102 DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
3103 m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size, pixelSize); // there may be some unused space at the end
3104
3105 // Copy destination image.
3106 m_image = de::MovePtr<Image>(new Image(
3107 vk, device, allocator, makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT), MemoryRequirement::Any));
3108
3109 // Image data will be copied here, so it can be read on the host.
3110 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3111 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible));
3112 }
3113
3114 void recordCommands (const VkCommandBuffer cmdBuffer)
3115 {
3116 const DeviceInterface& vk = m_context.getDeviceInterface();
3117 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_imageExtent, m_subresourceLayers);
3118 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3119
3120 // Resource -> Image
3121 {
3122 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3123 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3124 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3125 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3126 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3127 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3128 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3129 **m_image, // VkImage image
3130 m_subresourceRange // VkImageSubresourceRange subresourceRange
3131 );
3132 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3133 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3134
3135 vk.cmdCopyBufferToImage(cmdBuffer, m_resource.getBuffer().handle, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3136 }
3137 // Image -> Host buffer
3138 {
3139 const VkImageMemoryBarrier2KHR imageLayoutBarrier2 = makeImageMemoryBarrier2(
3140 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3141 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3142 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3143 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3144 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
3145 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3146 **m_image, // VkImage image
3147 m_subresourceRange // VkImageSubresourceRange subresourceRange
3148 );
3149 VkDependencyInfoKHR layoutDependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageLayoutBarrier2);
3150 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &layoutDependencyInfo);
3151
3152 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &copyRegion);
3153
3154 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3155 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3156 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3157 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3158 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3159 **m_hostBuffer, // VkBuffer buffer
3160 0u, // VkDeviceSize offset
3161 m_resource.getBuffer().size // VkDeviceSize size
3162 );
3163 VkDependencyInfoKHR bufferDependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
3164 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &bufferDependencyInfo);
3165 }
3166 }
3167
3168 SyncInfo getInSyncInfo (void) const
3169 {
3170 const SyncInfo syncInfo =
3171 {
3172 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3173 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3174 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3175 };
3176 return syncInfo;
3177 }
3178
3179 SyncInfo getOutSyncInfo (void) const
3180 {
3181 return emptySyncInfo;
3182 }
3183
3184 Data getData (void) const
3185 {
3186 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
3187 }
3188
3189 void setData (const Data& data)
3190 {
3191 setHostBufferData(m_context, *m_hostBuffer, data);
3192 }
3193
3194 private:
3195 OperationContext& m_context;
3196 Resource& m_resource;
3197 const VkImageSubresourceRange m_subresourceRange;
3198 const VkImageSubresourceLayers m_subresourceLayers;
3199 de::MovePtr<Buffer> m_hostBuffer;
3200 de::MovePtr<Image> m_image;
3201 VkExtent3D m_imageExtent;
3202 };
3203
3204 class Support : public OperationSupport
3205 {
3206 public:
3207 Support (const ResourceDescription& resourceDesc, const AccessMode mode)
3208 : m_mode (mode)
3209 , m_resourceType (resourceDesc.type)
3210 , m_requiredQueueFlags (resourceDesc.type == RESOURCE_TYPE_IMAGE && isDepthStencilFormat(resourceDesc.imageFormat) ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT)
3211 {
3212 // From spec:
3213 // Because depth or stencil aspect buffer to image copies may require format conversions on some implementations,
3214 // they are not supported on queues that do not support graphics.
3215
3216 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
3217 DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_BUFFER);
3218 DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_IMAGE);
3219 }
3220
3221 deUint32 getInResourceUsageFlags (void) const
3222 {
3223 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3224 return m_mode == ACCESS_MODE_READ ? VK_IMAGE_USAGE_TRANSFER_SRC_BIT : 0;
3225 else
3226 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
3227 }
3228
3229 deUint32 getOutResourceUsageFlags (void) const
3230 {
3231 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3232 return m_mode == ACCESS_MODE_WRITE ? VK_IMAGE_USAGE_TRANSFER_DST_BIT : 0;
3233 else
3234 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
3235 }
3236
3237 VkQueueFlags getQueueFlags (const OperationContext& context) const
3238 {
3239 DE_UNREF(context);
3240 return m_requiredQueueFlags;
3241 }
3242
3243 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
3244 {
3245 if (m_mode == ACCESS_MODE_READ)
3246 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
3247 else
3248 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
3249 }
3250
3251 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
3252 {
3253 DE_ASSERT(0);
3254 return de::MovePtr<Operation>();
3255 }
3256
3257 private:
3258 const AccessMode m_mode;
3259 const enum ResourceType m_resourceType;
3260 const VkQueueFlags m_requiredQueueFlags;
3261 };
3262
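//! Copies the in-resource buffer into the out-resource image (buffer-to-image transfer between two resources).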
3263 class CopyImplementation : public Operation
3264 {
3265 public:
3266 CopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
3267 : m_context (context)
3268 , m_inResource (inResource)
3269 , m_outResource (outResource)
3270 {
3271 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_BUFFER);
3272 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_IMAGE);
3273 }
3274
3275 void recordCommands (const VkCommandBuffer cmdBuffer)
3276 {
3277 const DeviceInterface& vk = m_context.getDeviceInterface();
3278 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_outResource.getImage().extent, m_outResource.getImage().subresourceLayers);
3279 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3280
3281 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3282 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3283 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3284 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3285 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3286 m_inResource.getBuffer().handle, // VkBuffer buffer
3287 0u, // VkDeviceSize offset
3288 m_inResource.getBuffer().size // VkDeviceSize size
3289 );
3290 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3291 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3292 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3293 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3294 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3295 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3296 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3297 m_outResource.getImage().handle, // VkImage image
3298 m_outResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3299 );
3300 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2, &imageMemoryBarrier2);
3301 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3302
3303 vk.cmdCopyBufferToImage(cmdBuffer, m_inResource.getBuffer().handle, m_outResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3304 }
3305
3306 SyncInfo getInSyncInfo (void) const
3307 {
3308 const SyncInfo syncInfo =
3309 {
3310 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3311 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3312 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
3313 };
3314 return syncInfo;
3315 }
3316
3317 SyncInfo getOutSyncInfo (void) const
3318 {
3319 const SyncInfo syncInfo =
3320 {
3321 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3322 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3323 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3324 };
3325 return syncInfo;
3326 }
3327
3328 Data getData (void) const
3329 {
3330 Data data = { 0, DE_NULL };
3331 return data;
3332 }
3333
3334 void setData (const Data&)
3335 {
3336 DE_ASSERT(0);
3337 }
3338
3339 private:
3340 OperationContext& m_context;
3341 Resource& m_inResource;
3342 Resource& m_outResource;
3343 };
3344
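//! Support class for the buffer-to-image copy between two resources; depth/stencil images require a graphics-capable queue.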
3345 class CopySupport : public OperationSupport
3346 {
3347 public:
3348 CopySupport (const ResourceDescription& resourceDesc)
3349 : m_resourceType (resourceDesc.type)
3350 , m_requiredQueueFlags (resourceDesc.type == RESOURCE_TYPE_IMAGE && isDepthStencilFormat(resourceDesc.imageFormat) ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT)
3351 {
3352 }
3353
3354 deUint32 getInResourceUsageFlags (void) const
3355 {
3356 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3357 return VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
3358 else
3359 return VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
3360 }
3361
3362 deUint32 getOutResourceUsageFlags (void) const
3363 {
3364 if (m_resourceType == RESOURCE_TYPE_IMAGE)
3365 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3366 else
3367 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3368 }
3369
3370 VkQueueFlags getQueueFlags (const OperationContext& context) const
3371 {
3372 DE_UNREF(context);
3373 return m_requiredQueueFlags;
3374 }
3375
3376 de::MovePtr<Operation> build (OperationContext&, Resource&) const
3377 {
3378 DE_ASSERT(0);
3379 return de::MovePtr<Operation>();
3380 }
3381
3382 de::MovePtr<Operation> build (OperationContext& context, Resource& inResource, Resource& outResource) const
3383 {
3384 return de::MovePtr<Operation>(new CopyImplementation(context, inResource, outResource));
3385 }
3386
3387 private:
3388 const enum ResourceType m_resourceType;
3389 const VkQueueFlags m_requiredQueueFlags;
3390 };
3391
3392 } // CopyBufferToImage ns
3393
3394 namespace CopyImageToBuffer
3395 {
3396
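//! A write operation that fills the resource buffer by staging the data through a temporary image (host buffer -> image -> resource buffer).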
3397 class WriteImplementation : public Operation
3398 {
3399 public:
3400 WriteImplementation (OperationContext& context, Resource& resource)
3401 : m_context (context)
3402 , m_resource (resource)
3403 , m_subresourceRange (makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3404 , m_subresourceLayers (makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3405 {
3406 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_BUFFER);
3407
3408 const DeviceInterface& vk = m_context.getDeviceInterface();
3409 const VkDevice device = m_context.getDevice();
3410 Allocator& allocator = m_context.getAllocator();
3411 const VkFormat format = VK_FORMAT_R8G8B8A8_UNORM;
3412 const deUint32 pixelSize = tcu::getPixelSize(mapVkFormat(format));
3413
3414 DE_ASSERT((m_resource.getBuffer().size % pixelSize) == 0);
3415 m_imageExtent = get2DImageExtentWithSize(m_resource.getBuffer().size, pixelSize);
3416
3417 // Source data staging buffer
3418 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3419 vk, device, allocator, makeBufferCreateInfo(m_resource.getBuffer().size, VK_BUFFER_USAGE_TRANSFER_SRC_BIT), MemoryRequirement::HostVisible));
3420
3421 const Allocation& alloc = m_hostBuffer->getAllocation();
3422 fillPattern(alloc.getHostPtr(), m_resource.getBuffer().size);
3423 flushAlloc(vk, device, alloc);
3424
3425 // Source data image
3426 m_image = de::MovePtr<Image>(new Image(
3427 vk, device, allocator, makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_imageExtent, format, VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT), MemoryRequirement::Any));
3428 }
3429
3430 void recordCommands (const VkCommandBuffer cmdBuffer)
3431 {
3432 const DeviceInterface& vk = m_context.getDeviceInterface();
3433 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_imageExtent, m_subresourceLayers);
3434 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3435
3436 // Host buffer -> Image
3437 {
3438 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3439 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3440 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3441 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3442 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3443 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3444 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3445 **m_image, // VkImage image
3446 m_subresourceRange // VkImageSubresourceRange subresourceRange
3447 );
3448 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3449 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3450
3451 vk.cmdCopyBufferToImage(cmdBuffer, **m_hostBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
3452 }
3453 // Image -> Resource
3454 {
3455 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3456 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3457 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3458 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3459 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3460 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout oldLayout
3461 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3462 **m_image, // VkImage image
3463 m_subresourceRange // VkImageSubresourceRange subresourceRange
3464 );
3465 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3466 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3467
3468 vk.cmdCopyImageToBuffer(cmdBuffer, **m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_resource.getBuffer().handle, 1u, &copyRegion);
3469 }
3470 }
3471
3472 SyncInfo getInSyncInfo (void) const
3473 {
3474 return emptySyncInfo;
3475 }
3476
3477 SyncInfo getOutSyncInfo (void) const
3478 {
3479 const SyncInfo syncInfo =
3480 {
3481 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3482 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3483 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3484 };
3485 return syncInfo;
3486 }
3487
3488 Data getData (void) const
3489 {
3490 return getHostBufferData(m_context, *m_hostBuffer, m_resource.getBuffer().size);
3491 }
3492
3493 void setData (const Data& data)
3494 {
3495 setHostBufferData(m_context, *m_hostBuffer, data);
3496 }
3497
3498 private:
3499 OperationContext& m_context;
3500 Resource& m_resource;
3501 const VkImageSubresourceRange m_subresourceRange;
3502 const VkImageSubresourceLayers m_subresourceLayers;
3503 de::MovePtr<Buffer> m_hostBuffer;
3504 de::MovePtr<Image> m_image;
3505 VkExtent3D m_imageExtent;
3506 };
3507
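//! A read operation that copies the resource image into a host-visible buffer with vkCmdCopyImageToBuffer.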
3508 class ReadImplementation : public Operation
3509 {
3510 public:
3511 ReadImplementation (OperationContext& context, Resource& resource)
3512 : m_context (context)
3513 , m_resource (resource)
3514 , m_bufferSize (getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent))
3515 {
3516 DE_ASSERT(m_resource.getType() == RESOURCE_TYPE_IMAGE);
3517
3518 const DeviceInterface& vk = m_context.getDeviceInterface();
3519 const VkDevice device = m_context.getDevice();
3520 Allocator& allocator = m_context.getAllocator();
3521
3522 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
3523 vk, device, allocator, makeBufferCreateInfo(m_bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible));
3524
3525 const Allocation& alloc = m_hostBuffer->getAllocation();
3526 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_bufferSize));
3527 flushAlloc(vk, device, alloc);
3528 }
3529
3530 void recordCommands (const VkCommandBuffer cmdBuffer)
3531 {
3532 const DeviceInterface& vk = m_context.getDeviceInterface();
3533 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_resource.getImage().extent, m_resource.getImage().subresourceLayers);
3534 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3535
3536 vk.cmdCopyImageToBuffer(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, **m_hostBuffer, 1u, &copyRegion);
3537
3538 // Insert a barrier so data written by the transfer is available to the host
3539 {
3540 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3541 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3542 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
3543 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3544 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3545 **m_hostBuffer, // VkBuffer buffer
3546 0u, // VkDeviceSize offset
3547 VK_WHOLE_SIZE // VkDeviceSize size
3548 );
3549 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
3550 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3551 }
3552 }
3553
3554 SyncInfo getInSyncInfo (void) const
3555 {
3556 const SyncInfo syncInfo =
3557 {
3558 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3559 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3560 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout imageLayout;
3561 };
3562 return syncInfo;
3563 }
3564
3565 SyncInfo getOutSyncInfo (void) const
3566 {
3567 return emptySyncInfo;
3568 }
3569
3570 Data getData (void) const
3571 {
3572 return getHostBufferData(m_context, *m_hostBuffer, m_bufferSize);
3573 }
3574
3575 void setData (const Data&)
3576 {
3577 DE_ASSERT(0);
3578 }
3579
3580 private:
3581 OperationContext& m_context;
3582 Resource& m_resource;
3583 de::MovePtr<Buffer> m_hostBuffer;
3584 const VkDeviceSize m_bufferSize;
3585 };
3586
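//! Copies the in-resource image into the out-resource buffer (image-to-buffer transfer between two resources).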
3587 class CopyImplementation : public Operation
3588 {
3589 public:
3590 CopyImplementation (OperationContext& context, Resource& inResource, Resource& outResource)
3591 : m_context (context)
3592 , m_inResource (inResource)
3593 , m_outResource (outResource)
3594 , m_subresourceRange (makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u))
3595 , m_subresourceLayers (makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u))
3596 {
3597 DE_ASSERT(m_inResource.getType() == RESOURCE_TYPE_IMAGE);
3598 DE_ASSERT(m_outResource.getType() == RESOURCE_TYPE_BUFFER);
3599 }
3600
3601 void recordCommands (const VkCommandBuffer cmdBuffer)
3602 {
3603 const DeviceInterface& vk = m_context.getDeviceInterface();
3604 const VkBufferImageCopy copyRegion = makeBufferImageCopy(m_inResource.getImage().extent, m_subresourceLayers);
3605 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3606
3607 {
3608 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3609 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3610 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3611 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3612 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3613 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3614 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, // VkImageLayout newLayout
3615 m_inResource.getImage().handle, // VkImage image
3616 m_inResource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3617 );
3618 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
3619 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3620 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3621 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3622 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3623 m_outResource.getBuffer().handle, // VkBuffer buffer
3624 0u, // VkDeviceSize offset
3625 m_outResource.getBuffer().size // VkDeviceSize size
3626 );
3627 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2, &imageMemoryBarrier2);
3628 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3629 }
3630
3631 vk.cmdCopyImageToBuffer(cmdBuffer, m_inResource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_outResource.getBuffer().handle, 1u, &copyRegion);
3632 }
3633
3634 SyncInfo getInSyncInfo (void) const
3635 {
3636 const SyncInfo syncInfo =
3637 {
3638 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3639 VK_ACCESS_2_TRANSFER_READ_BIT_KHR, // VkAccessFlags accessMask;
3640 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3641 };
3642 return syncInfo;
3643 }
3644
3645 SyncInfo getOutSyncInfo (void) const
3646 {
3647 const SyncInfo syncInfo =
3648 {
3649 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
3650 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3651 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
3652 };
3653 return syncInfo;
3654 }
3655
3656 Data getData (void) const
3657 {
3658 Data data = { 0, DE_NULL };
3659 return data;
3660 }
3661
3662 void setData (const Data&)
3663 {
3664 DE_ASSERT(0);
3665 }
3666
3667 private:
3668 OperationContext& m_context;
3669 Resource& m_inResource;
3670 Resource& m_outResource;
3671 const VkImageSubresourceRange m_subresourceRange;
3672 const VkImageSubresourceLayers m_subresourceLayers;
3673 };
3674
3675 class Support : public OperationSupport
3676 {
3677 public:
3678 Support (const ResourceDescription& resourceDesc, const AccessMode mode)
3679 : m_mode (mode)
3680 , m_requiredQueueFlags (resourceDesc.type == RESOURCE_TYPE_IMAGE && isDepthStencilFormat(resourceDesc.imageFormat) ? VK_QUEUE_GRAPHICS_BIT : VK_QUEUE_TRANSFER_BIT)
3681 {
3682 DE_ASSERT(m_mode == ACCESS_MODE_READ || m_mode == ACCESS_MODE_WRITE);
3683 DE_ASSERT(m_mode == ACCESS_MODE_READ || resourceDesc.type != RESOURCE_TYPE_IMAGE);
3684 DE_ASSERT(m_mode == ACCESS_MODE_WRITE || resourceDesc.type != RESOURCE_TYPE_BUFFER);
3685 }
3686
3687 deUint32 getInResourceUsageFlags (void) const
3688 {
3689 return m_mode == ACCESS_MODE_READ ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT : 0;
3690 }
3691
3692 deUint32 getOutResourceUsageFlags (void) const
3693 {
3694 return m_mode == ACCESS_MODE_WRITE ? VK_BUFFER_USAGE_TRANSFER_DST_BIT : 0;
3695 }
3696
3697 VkQueueFlags getQueueFlags (const OperationContext& context) const
3698 {
3699 DE_UNREF(context);
3700 return m_requiredQueueFlags;
3701 }
3702
3703 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
3704 {
3705 if (m_mode == ACCESS_MODE_READ)
3706 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
3707 else
3708 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
3709 }
3710
3711 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
3712 {
3713 DE_ASSERT(0);
3714 return de::MovePtr<Operation>();
3715 }
3716
3717 private:
3718 const AccessMode m_mode;
3719 const VkQueueFlags m_requiredQueueFlags;
3720 };
3721
3722 } // CopyImageToBuffer ns
3723
3724 namespace ClearImage
3725 {
3726
3727 enum ClearMode
3728 {
3729 CLEAR_MODE_COLOR,
3730 CLEAR_MODE_DEPTH_STENCIL,
3731 };
3732
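//! A write operation that clears a color or depth/stencil image and keeps the expected pixel data for verification.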
3733 class Implementation : public Operation
3734 {
3735 public:
3736 Implementation (OperationContext& context, Resource& resource, const ClearMode mode)
3737 : m_context (context)
3738 , m_resource (resource)
3739 , m_clearValue (makeClearValue(m_resource.getImage().format))
3740 , m_mode (mode)
3741 {
3742 const VkDeviceSize size = getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
3743 const VkExtent3D& extent = m_resource.getImage().extent;
3744 const VkFormat format = m_resource.getImage().format;
3745 const tcu::TextureFormat texFormat = mapVkFormat(format);
3746
3747 m_data.resize(static_cast<std::size_t>(size));
3748 tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
3749 clearPixelBuffer(imagePixels, m_clearValue);
3750 }
3751
3752 void recordCommands (const VkCommandBuffer cmdBuffer)
3753 {
3754 const DeviceInterface& vk = m_context.getDeviceInterface();
3755 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3756
3757 VkPipelineStageFlags2KHR dstStageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR;
3758 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
3759 dstStageMask = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR;
3760
3761 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3762 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3763 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3764 dstStageMask, // VkPipelineStageFlags2KHR dstStageMask
3765 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3766 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3767 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout newLayout
3768 m_resource.getImage().handle, // VkImage image
3769 m_resource.getImage().subresourceRange // VkImageSubresourceRange subresourceRange
3770 );
3771 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3772 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3773
3774 if (m_mode == CLEAR_MODE_COLOR)
3775 vk.cmdClearColorImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &m_clearValue.color, 1u, &m_resource.getImage().subresourceRange);
3776 else
3777 vk.cmdClearDepthStencilImage(cmdBuffer, m_resource.getImage().handle, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &m_clearValue.depthStencil, 1u, &m_resource.getImage().subresourceRange);
3778 }
3779
3780 SyncInfo getInSyncInfo (void) const
3781 {
3782 return emptySyncInfo;
3783 }
3784
3785 SyncInfo getOutSyncInfo (void) const
3786 {
3787 VkPipelineStageFlags2KHR stageMask = VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR;
3788 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
3789 stageMask = VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR;
3790
3791 return
3792 {
3793 stageMask, // VkPipelineStageFlags stageMask;
3794 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
3795 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, // VkImageLayout imageLayout;
3796 };
3797 }
3798
3799 Data getData (void) const
3800 {
3801 const Data data =
3802 {
3803 m_data.size(), // std::size_t size;
3804 &m_data[0], // const deUint8* data;
3805 };
3806 return data;
3807 }
3808
3809 void setData (const Data&)
3810 {
3811 DE_ASSERT(0);
3812 }
3813
3814 private:
3815 OperationContext& m_context;
3816 Resource& m_resource;
3817 std::vector<deUint8> m_data;
3818 const VkClearValue m_clearValue;
3819 const ClearMode m_mode;
3820 };
3821
3822 class Support : public OperationSupport
3823 {
3824 public:
3825 Support (const ResourceDescription& resourceDesc, const ClearMode mode)
3826 : m_resourceDesc (resourceDesc)
3827 , m_mode (mode)
3828 {
3829 DE_ASSERT(m_mode == CLEAR_MODE_COLOR || m_mode == CLEAR_MODE_DEPTH_STENCIL);
3830 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
3831 DE_ASSERT(m_resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT || (m_mode != CLEAR_MODE_COLOR));
3832 DE_ASSERT((m_resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) || (m_mode != CLEAR_MODE_DEPTH_STENCIL));
3833 }
3834
3835 deUint32 getInResourceUsageFlags (void) const
3836 {
3837 return 0;
3838 }
3839
3840 deUint32 getOutResourceUsageFlags (void) const
3841 {
3842 return VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3843 }
3844
3845 VkQueueFlags getQueueFlags (const OperationContext& context) const
3846 {
3847 DE_UNREF(context);
3848 if (m_mode == CLEAR_MODE_COLOR)
3849 return VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT;
3850 else
3851 return VK_QUEUE_GRAPHICS_BIT;
3852 }
3853
3854 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
3855 {
3856 return de::MovePtr<Operation>(new Implementation(context, resource, m_mode));
3857 }
3858
3859 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
3860 {
3861 DE_ASSERT(0);
3862 return de::MovePtr<Operation>();
3863 }
3864
3865 private:
3866 const ResourceDescription m_resourceDesc;
3867 const ClearMode m_mode;
3868 };
3869
3870 } // ClearImage ns
3871
3872 namespace Draw
3873 {
3874
3875 enum DrawCall
3876 {
3877 DRAW_CALL_DRAW,
3878 DRAW_CALL_DRAW_INDEXED,
3879 DRAW_CALL_DRAW_INDIRECT,
3880 DRAW_CALL_DRAW_INDEXED_INDIRECT,
3881 };
3882
3883 //! A write operation that is a result of drawing to an image.
3884 //! \todo Add support for depth/stencil too?
3885 class Implementation : public Operation
3886 {
3887 public:
3888 Implementation (OperationContext& context, Resource& resource, const DrawCall drawCall)
3889 : m_context (context)
3890 , m_resource (resource)
3891 , m_drawCall (drawCall)
3892 , m_vertices (context)
3893 {
3894 const DeviceInterface& vk = context.getDeviceInterface();
3895 const VkDevice device = context.getDevice();
3896 Allocator& allocator = context.getAllocator();
3897
3898 // Indirect buffer
3899
3900 if (m_drawCall == DRAW_CALL_DRAW_INDIRECT)
3901 {
3902 m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
3903 makeBufferCreateInfo(sizeof(VkDrawIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT), MemoryRequirement::HostVisible));
3904
3905 const Allocation& alloc = m_indirectBuffer->getAllocation();
3906 VkDrawIndirectCommand* const pIndirectCommand = static_cast<VkDrawIndirectCommand*>(alloc.getHostPtr());
3907
3908 pIndirectCommand->vertexCount = m_vertices.getNumVertices();
3909 pIndirectCommand->instanceCount = 1u;
3910 pIndirectCommand->firstVertex = 0u;
3911 pIndirectCommand->firstInstance = 0u;
3912
3913 flushAlloc(vk, device, alloc);
3914 }
3915 else if (m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
3916 {
3917 m_indirectBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
3918 makeBufferCreateInfo(sizeof(VkDrawIndexedIndirectCommand), VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT), MemoryRequirement::HostVisible));
3919
3920 const Allocation& alloc = m_indirectBuffer->getAllocation();
3921 VkDrawIndexedIndirectCommand* const pIndirectCommand = static_cast<VkDrawIndexedIndirectCommand*>(alloc.getHostPtr());
3922
3923 pIndirectCommand->indexCount = m_vertices.getNumIndices();
3924 pIndirectCommand->instanceCount = 1u;
3925 pIndirectCommand->firstIndex = 0u;
3926 pIndirectCommand->vertexOffset = 0u;
3927 pIndirectCommand->firstInstance = 0u;
3928
3929 flushAlloc(vk, device, alloc);
3930 }
3931
3932 // Resource image is the color attachment
3933
3934 m_colorFormat = m_resource.getImage().format;
3935 m_colorSubresourceRange = m_resource.getImage().subresourceRange;
3936 m_colorImage = m_resource.getImage().handle;
3937 m_attachmentExtent = m_resource.getImage().extent;
3938
3939 // Pipeline
3940
3941 m_colorAttachmentView = makeImageView (vk, device, m_colorImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorSubresourceRange);
3942 m_renderPass = makeRenderPass (vk, device, m_colorFormat);
3943 m_framebuffer = makeFramebuffer (vk, device, *m_renderPass, *m_colorAttachmentView, m_attachmentExtent.width, m_attachmentExtent.height);
3944 m_pipelineLayout = makePipelineLayout(vk, device);
3945
3946 GraphicsPipelineBuilder pipelineBuilder;
3947 pipelineBuilder
3948 .setRenderSize (tcu::IVec2(m_attachmentExtent.width, m_attachmentExtent.height))
3949 .setVertexInputSingleAttribute (m_vertices.getVertexFormat(), m_vertices.getVertexStride())
3950 .setShader (vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get("draw_vert"), DE_NULL)
3951 .setShader (vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get("draw_frag"), DE_NULL);
3952
3953 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData());
3954
3955 // Set expected draw values
3956
3957 m_expectedData.resize(static_cast<size_t>(getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent)));
3958 tcu::PixelBufferAccess imagePixels(mapVkFormat(m_colorFormat), m_attachmentExtent.width, m_attachmentExtent.height, m_attachmentExtent.depth, &m_expectedData[0]);
3959 clearPixelBuffer(imagePixels, makeClearValue(m_colorFormat));
3960 }
3961
3962 void recordCommands (const VkCommandBuffer cmdBuffer)
3963 {
3964 const DeviceInterface& vk = m_context.getDeviceInterface();
3965 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
3966
3967 // Change color attachment image layout
3968 {
3969 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
3970 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
3971 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
3972 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
3973 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
3974 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
3975 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
3976 m_colorImage, // VkImage image
3977 m_colorSubresourceRange // VkImageSubresourceRange subresourceRange
3978 );
3979 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
3980 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
3981 }
3982
3983 {
3984 const VkRect2D renderArea = makeRect2D(m_attachmentExtent);
3985 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
3986
3987 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
3988 }
3989
3990 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
3991 {
3992 const VkDeviceSize vertexBufferOffset = 0ull;
3993 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
3994 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
3995 }
3996
3997 if (m_drawCall == DRAW_CALL_DRAW_INDEXED || m_drawCall == DRAW_CALL_DRAW_INDEXED_INDIRECT)
3998 vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
3999
4000 switch (m_drawCall)
4001 {
4002 case DRAW_CALL_DRAW:
4003 vk.cmdDraw(cmdBuffer, m_vertices.getNumVertices(), 1u, 0u, 0u);
4004 break;
4005
4006 case DRAW_CALL_DRAW_INDEXED:
4007 vk.cmdDrawIndexed(cmdBuffer, m_vertices.getNumIndices(), 1u, 0u, 0, 0u);
4008 break;
4009
4010 case DRAW_CALL_DRAW_INDIRECT:
4011 vk.cmdDrawIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
4012 break;
4013
4014 case DRAW_CALL_DRAW_INDEXED_INDIRECT:
4015 vk.cmdDrawIndexedIndirect(cmdBuffer, **m_indirectBuffer, 0u, 1u, 0u);
4016 break;
4017 }
4018
4019 endRenderPass(vk, cmdBuffer);
4020 }
4021
4022 SyncInfo getInSyncInfo (void) const
4023 {
4024 return emptySyncInfo;
4025 }
4026
4027 SyncInfo getOutSyncInfo (void) const
4028 {
4029 const SyncInfo syncInfo =
4030 {
4031 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags stageMask;
4032 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4033 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout imageLayout;
4034 };
4035 return syncInfo;
4036 }
4037
4038 Data getData (void) const
4039 {
4040 const Data data =
4041 {
4042 m_expectedData.size(), // std::size_t size;
4043 &m_expectedData[0], // const deUint8* data;
4044 };
4045 return data;
4046 }
4047
4048 void setData (const Data& data)
4049 {
4050 DE_ASSERT(m_expectedData.size() == data.size);
4051 deMemcpy(&m_expectedData[0], data.data, data.size);
4052 }
4053
4054 private:
4055 OperationContext& m_context;
4056 Resource& m_resource;
4057 const DrawCall m_drawCall;
4058 const VertexGrid m_vertices;
4059 std::vector<deUint8> m_expectedData;
4060 de::MovePtr<Buffer> m_indirectBuffer;
4061 VkFormat m_colorFormat;
4062 VkImage m_colorImage;
4063 Move<VkImageView> m_colorAttachmentView;
4064 VkImageSubresourceRange m_colorSubresourceRange;
4065 VkExtent3D m_attachmentExtent;
4066 Move<VkRenderPass> m_renderPass;
4067 Move<VkFramebuffer> m_framebuffer;
4068 Move<VkPipelineLayout> m_pipelineLayout;
4069 Move<VkPipeline> m_pipeline;
4070 };
4071
4072 template<typename T, std::size_t N>
4073 std::string toString (const T (&values)[N])
4074 {
4075 std::ostringstream str;
4076 for (std::size_t i = 0; i < N; ++i)
4077 str << (i != 0 ? ", " : "") << values[i];
4078 return str.str();
4079 }
4080
4081 class Support : public OperationSupport
4082 {
4083 public:
4084 Support (const ResourceDescription& resourceDesc, const DrawCall drawCall)
4085 : m_resourceDesc (resourceDesc)
4086 , m_drawCall (drawCall)
4087 {
4088 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE && m_resourceDesc.imageType == VK_IMAGE_TYPE_2D);
4089 DE_ASSERT(!isDepthStencilFormat(m_resourceDesc.imageFormat));
4090 }
4091
4092 void initPrograms (SourceCollections& programCollection) const
4093 {
4094 // Vertex
4095 {
4096 std::ostringstream src;
4097 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4098 << "\n"
4099 << "layout(location = 0) in vec4 v_in_position;\n"
4100 << "\n"
4101 << "out " << s_perVertexBlock << ";\n"
4102 << "\n"
4103 << "void main (void)\n"
4104 << "{\n"
4105 << " gl_Position = v_in_position;\n"
4106 << "}\n";
4107
4108 programCollection.glslSources.add("draw_vert") << glu::VertexSource(src.str());
4109 }
4110
4111 // Fragment
4112 {
4113 const VkClearValue clearValue = makeClearValue(m_resourceDesc.imageFormat);
4114 const bool isIntegerFormat = isIntFormat(m_resourceDesc.imageFormat) || isUintFormat(m_resourceDesc.imageFormat);
4115 const std::string colorType = (isIntegerFormat ? "uvec4" : "vec4");
4116
4117 std::ostringstream src;
4118 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4119 << "\n"
4120 << "layout(location = 0) out " << colorType << " o_color;\n"
4121 << "\n"
4122 << "void main (void)\n"
4123 << "{\n"
4124 << " o_color = " << colorType << "(" << (isIntegerFormat ? toString(clearValue.color.uint32) : toString(clearValue.color.float32)) << ");\n"
4125 << "}\n";
4126
4127 programCollection.glslSources.add("draw_frag") << glu::FragmentSource(src.str());
4128 }
4129 }
4130
4131 deUint32 getInResourceUsageFlags (void) const
4132 {
4133 return 0;
4134 }
4135
4136 deUint32 getOutResourceUsageFlags (void) const
4137 {
4138 return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4139 }
4140
4141 VkQueueFlags getQueueFlags (const OperationContext& context) const
4142 {
4143 DE_UNREF(context);
4144 return VK_QUEUE_GRAPHICS_BIT;
4145 }
4146
4147 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
4148 {
4149 return de::MovePtr<Operation>(new Implementation(context, resource, m_drawCall));
4150 }
4151
4152 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
4153 {
4154 DE_ASSERT(0);
4155 return de::MovePtr<Operation>();
4156 }
4157
4158 private:
4159 const ResourceDescription m_resourceDesc;
4160 const DrawCall m_drawCall;
4161 };
4162
4163 } // Draw ns
4164
4165 namespace ClearAttachments
4166 {
4167
4168 class Implementation : public Operation
4169 {
4170 public:
4171 Implementation (OperationContext& context, Resource& resource)
4172 : m_context (context)
4173 , m_resource (resource)
4174 , m_clearValue (makeClearValue(m_resource.getImage().format))
4175 {
4176 const DeviceInterface& vk = context.getDeviceInterface();
4177 const VkDevice device = context.getDevice();
4178
4179 const VkDeviceSize size = getPixelBufferSize(m_resource.getImage().format, m_resource.getImage().extent);
4180 const VkExtent3D& extent = m_resource.getImage().extent;
4181 const VkFormat format = m_resource.getImage().format;
4182 const tcu::TextureFormat texFormat = mapVkFormat(format);
4183 const SyncInfo syncInfo = getOutSyncInfo();
4184
4185 m_data.resize(static_cast<std::size_t>(size));
4186 tcu::PixelBufferAccess imagePixels(texFormat, extent.width, extent.height, extent.depth, &m_data[0]);
4187 clearPixelBuffer(imagePixels, m_clearValue);
4188
4189 m_attachmentView = makeImageView(vk, device, m_resource.getImage().handle, getImageViewType(m_resource.getImage().imageType), m_resource.getImage().format, m_resource.getImage().subresourceRange);
4190
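// Select a color or a depth/stencil render pass based on the aspect of the image being cleared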
4191 switch (m_resource.getImage().subresourceRange.aspectMask)
4192 {
4193 case VK_IMAGE_ASPECT_COLOR_BIT:
4194 m_renderPass = makeRenderPass(vk, device, m_resource.getImage().format, VK_FORMAT_UNDEFINED, VK_ATTACHMENT_LOAD_OP_DONT_CARE, syncInfo.imageLayout);
4195 break;
4196 case VK_IMAGE_ASPECT_STENCIL_BIT:
4197 case VK_IMAGE_ASPECT_DEPTH_BIT:
4198 m_renderPass = makeRenderPass(vk, device, VK_FORMAT_UNDEFINED, m_resource.getImage().format, VK_ATTACHMENT_LOAD_OP_DONT_CARE, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, syncInfo.imageLayout);
4199 break;
4200 default:
4201 DE_ASSERT(0);
4202 break;
4203 }
4204
4205 m_frameBuffer = makeFramebuffer(vk, device, *m_renderPass, *m_attachmentView, m_resource.getImage().extent.width, m_resource.getImage().extent.height);
4206 }
4207
4208 void recordCommands (const VkCommandBuffer cmdBuffer)
4209 {
4210 const DeviceInterface& vk = m_context.getDeviceInterface();
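// For depth/stencil images, transition the attachment to DEPTH_STENCIL_ATTACHMENT_OPTIMAL before the render pass begins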
4211 if ((m_resource.getImage().subresourceRange.aspectMask & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) != 0)
4212 {
4213 const VkImageMemoryBarrier imageBarrier =
4214 {
4215 VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, // sType
4216 DE_NULL, // pNext
4217 0u, // srcAccessMask
4218 VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT, // dstAccessMask
4219 VK_IMAGE_LAYOUT_UNDEFINED, // oldLayout
4220 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, // newLayout
4221 VK_QUEUE_FAMILY_IGNORED, // srcQueueFamilyIndex
4222 VK_QUEUE_FAMILY_IGNORED, // dstQueueFamilyIndex
4223 m_resource.getImage().handle, // image
4224 m_resource.getImage().subresourceRange // subresourceRange
4225 };
4226 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT, 0u, 0u, DE_NULL, 0u, DE_NULL, 1u, &imageBarrier);
4227 }
4228 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_frameBuffer, makeRect2D(0, 0, m_resource.getImage().extent.width, m_resource.getImage().extent.height), m_clearValue);
4229
4230 const VkClearAttachment clearAttachment =
4231 {
4232 m_resource.getImage().subresourceRange.aspectMask, // VkImageAspectFlags aspectMask;
4233 0, // deUint32 colorAttachment;
4234 m_clearValue // VkClearValue clearValue;
4235 };
4236
4237 const VkRect2D rect2D = makeRect2D(m_resource.getImage().extent);
4238
4239 const VkClearRect clearRect =
4240 {
4241 rect2D, // VkRect2D rect;
4242 0u, // deUint32 baseArrayLayer;
4243 m_resource.getImage().subresourceLayers.layerCount // deUint32 layerCount;
4244 };
4245
4246 vk.cmdClearAttachments(cmdBuffer, 1, &clearAttachment, 1, &clearRect);
4247
4248 endRenderPass(vk, cmdBuffer);
4249 }
4250
4251 SyncInfo getInSyncInfo (void) const
4252 {
4253 return emptySyncInfo;
4254 }
4255
4256 SyncInfo getOutSyncInfo (void) const
4257 {
4258 SyncInfo syncInfo;
4259 syncInfo.stageMask = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR;
4260
4261 switch (m_resource.getImage().subresourceRange.aspectMask)
4262 {
4263 case VK_IMAGE_ASPECT_COLOR_BIT:
4264 syncInfo.accessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR;
4265 syncInfo.imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
4266 break;
4267 case VK_IMAGE_ASPECT_STENCIL_BIT:
4268 case VK_IMAGE_ASPECT_DEPTH_BIT:
4269 syncInfo.accessMask = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR;
4270 syncInfo.imageLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
4271 break;
4272 default:
4273 DE_ASSERT(0);
4274 break;
4275 }
4276
4277 return syncInfo;
4278 }
4279
4280 Data getData (void) const
4281 {
4282 const Data data =
4283 {
4284 m_data.size(), // std::size_t size;
4285 &m_data[0], // const deUint8* data;
4286 };
4287 return data;
4288 }
4289
4290 void setData (const Data&)
4291 {
4292 DE_ASSERT(0);
4293 }
4294
4295 private:
4296 OperationContext& m_context;
4297 Resource& m_resource;
4298 std::vector<deUint8> m_data;
4299 const VkClearValue m_clearValue;
4300 Move<VkImageView> m_attachmentView;
4301 Move<VkRenderPass> m_renderPass;
4302 Move<VkFramebuffer> m_frameBuffer;
4303 };
4304
4305 class Support : public OperationSupport
4306 {
4307 public:
4308 Support (const ResourceDescription& resourceDesc)
4309 : m_resourceDesc (resourceDesc)
4310 {
4311 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_IMAGE);
4312 }
4313
4314 deUint32 getInResourceUsageFlags (void) const
4315 {
4316 return 0;
4317 }
4318
4319 deUint32 getOutResourceUsageFlags (void) const
4320 {
4321 switch (m_resourceDesc.imageAspect)
4322 {
4323 case VK_IMAGE_ASPECT_COLOR_BIT:
4324 return VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
4325 case VK_IMAGE_ASPECT_STENCIL_BIT:
4326 case VK_IMAGE_ASPECT_DEPTH_BIT:
4327 return VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
4328 default:
4329 DE_ASSERT(0);
4330 }
4331 return 0u;
4332 }
4333
4334 VkQueueFlags getQueueFlags (const OperationContext& context) const
4335 {
4336 DE_UNREF(context);
4337 return VK_QUEUE_GRAPHICS_BIT;
4338 }
4339
4340 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
4341 {
4342 return de::MovePtr<Operation>(new Implementation(context, resource));
4343 }
4344
4345 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
4346 {
4347 DE_ASSERT(0);
4348 return de::MovePtr<Operation>();
4349 }
4350
4351 private:
4352 const ResourceDescription m_resourceDesc;
4353 };
4354
4355 } // ClearAttachments
4356
4357 namespace IndirectBuffer
4358 {
4359
4360 class GraphicsPipeline : public Pipeline
4361 {
4362 public:
4363 GraphicsPipeline (OperationContext& context,
4364 const ResourceType resourceType,
4365 const VkBuffer indirectBuffer,
4366 const std::string& shaderPrefix,
4367 const VkDescriptorSetLayout descriptorSetLayout)
4368 : m_resourceType (resourceType)
4369 , m_indirectBuffer (indirectBuffer)
4370 , m_vertices (context)
4371 {
4372 const DeviceInterface& vk = context.getDeviceInterface();
4373 const VkDevice device = context.getDevice();
4374 Allocator& allocator = context.getAllocator();
4375
4376 // Color attachment
4377
4378 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
4379 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
4380 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
4381 m_colorAttachmentImage = de::MovePtr<Image>(new Image(vk, device, allocator,
4382 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),
4383 MemoryRequirement::Any));
4384
4385 // Pipeline
4386
4387 m_colorAttachmentView = makeImageView (vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorImageSubresourceRange);
4388 m_renderPass = makeRenderPass (vk, device, m_colorFormat);
4389 m_framebuffer = makeFramebuffer (vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width, m_colorImageExtent.height);
4390 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
4391
4392 GraphicsPipelineBuilder pipelineBuilder;
4393 pipelineBuilder
4394 .setRenderSize (tcu::IVec2(m_colorImageExtent.width, m_colorImageExtent.height))
4395 .setVertexInputSingleAttribute (m_vertices.getVertexFormat(), m_vertices.getVertexStride())
4396 .setShader (vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get(shaderPrefix + "vert"), DE_NULL)
4397 .setShader (vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get(shaderPrefix + "frag"), DE_NULL);
4398
4399 m_pipeline = pipelineBuilder.build(vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData());
4400 }
4401
4402 void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
4403 {
4404 const DeviceInterface& vk = context.getDeviceInterface();
4405 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(context.getSynchronizationType(), vk, DE_FALSE);
4406
4407 // Change color attachment image layout
4408 {
4409 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4410 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4411 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4412 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4413 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4414 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4415 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
4416 **m_colorAttachmentImage, // VkImage image
4417 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
4418 );
4419 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
4420 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4421 }
4422
4423 {
4424 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
4425 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
4426
4427 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
4428 }
4429
4430 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4431 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
4432 {
4433 const VkDeviceSize vertexBufferOffset = 0ull;
4434 const VkBuffer vertexBuffer = m_vertices.getVertexBuffer();
4435 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &vertexBuffer, &vertexBufferOffset);
4436 }
4437
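// Consume the indirect buffer under test with the appropriate indirect draw command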
4438 switch (m_resourceType)
4439 {
4440 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
4441 vk.cmdDrawIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
4442 break;
4443
4444 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
4445 vk.cmdBindIndexBuffer(cmdBuffer, m_vertices.getIndexBuffer(), 0u, m_vertices.getIndexType());
4446 vk.cmdDrawIndexedIndirect(cmdBuffer, m_indirectBuffer, 0u, 1u, 0u);
4447 break;
4448
4449 default:
4450 DE_ASSERT(0);
4451 break;
4452 }
4453 endRenderPass(vk, cmdBuffer);
4454 }
4455
4456 private:
4457 const ResourceType m_resourceType;
4458 const VkBuffer m_indirectBuffer;
4459 const VertexGrid m_vertices;
4460 VkFormat m_colorFormat;
4461 de::MovePtr<Image> m_colorAttachmentImage;
4462 Move<VkImageView> m_colorAttachmentView;
4463 VkExtent3D m_colorImageExtent;
4464 VkImageSubresourceRange m_colorImageSubresourceRange;
4465 Move<VkRenderPass> m_renderPass;
4466 Move<VkFramebuffer> m_framebuffer;
4467 Move<VkPipelineLayout> m_pipelineLayout;
4468 Move<VkPipeline> m_pipeline;
4469 };
4470
4471 class ComputePipeline : public Pipeline
4472 {
4473 public:
4474 ComputePipeline (OperationContext& context,
4475 const VkBuffer indirectBuffer,
4476 const std::string& shaderPrefix,
4477 const VkDescriptorSetLayout descriptorSetLayout)
4478 : m_indirectBuffer (indirectBuffer)
4479 {
4480 const DeviceInterface& vk = context.getDeviceInterface();
4481 const VkDevice device = context.getDevice();
4482
4483 const Unique<VkShaderModule> shaderModule(createShaderModule(vk, device, context.getBinaryCollection().get(shaderPrefix + "comp"), (VkShaderModuleCreateFlags)0));
4484
4485 m_pipelineLayout = makePipelineLayout(vk, device, descriptorSetLayout);
4486 m_pipeline = makeComputePipeline(vk, device, *m_pipelineLayout, *shaderModule, DE_NULL, context.getPipelineCacheData());
4487 }
4488
4489 void recordCommands (OperationContext& context, const VkCommandBuffer cmdBuffer, const VkDescriptorSet descriptorSet)
4490 {
4491 const DeviceInterface& vk = context.getDeviceInterface();
4492
4493 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipeline);
4494 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *m_pipelineLayout, 0u, 1u, &descriptorSet, 0u, DE_NULL);
4495 vk.cmdDispatchIndirect(cmdBuffer, m_indirectBuffer, 0u);
4496 }
4497
4498 private:
4499 const VkBuffer m_indirectBuffer;
4500 Move<VkPipelineLayout> m_pipelineLayout;
4501 Move<VkPipeline> m_pipeline;
4502 };
4503
4504 //! Read indirect buffer by executing an indirect draw or dispatch command.
4505 class ReadImplementation : public Operation
4506 {
4507 public:
4508 ReadImplementation (OperationContext& context, Resource& resource)
4509 : m_context (context)
4510 , m_resource (resource)
4511 , m_stage (resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_SHADER_STAGE_COMPUTE_BIT : VK_SHADER_STAGE_VERTEX_BIT)
4512 , m_pipelineStage (pipelineStageFlagsFromShaderStageFlagBits(m_stage))
4513 , m_hostBufferSizeBytes (sizeof(deUint32))
4514 {
4515 requireFeaturesForSSBOAccess (m_context, m_stage);
4516
4517 const DeviceInterface& vk = m_context.getDeviceInterface();
4518 const VkDevice device = m_context.getDevice();
4519 Allocator& allocator = m_context.getAllocator();
4520
4521 m_hostBuffer = de::MovePtr<Buffer>(new Buffer(
4522 vk, device, allocator, makeBufferCreateInfo(m_hostBufferSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible));
4523
4524 // Init host buffer data
4525 {
4526 const Allocation& alloc = m_hostBuffer->getAllocation();
4527 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(m_hostBufferSizeBytes));
4528 flushAlloc(vk, device, alloc);
4529 }
4530
4531 // Prepare descriptors
4532 {
4533 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
4534 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, m_stage)
4535 .build(vk, device);
4536
4537 m_descriptorPool = DescriptorPoolBuilder()
4538 .addType(VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
4539 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
4540
4541 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
4542
4543 const VkDescriptorBufferInfo hostBufferInfo = makeDescriptorBufferInfo(**m_hostBuffer, 0u, m_hostBufferSizeBytes);
4544
4545 DescriptorSetUpdateBuilder()
4546 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &hostBufferInfo)
4547 .update(vk, device);
4548 }
4549
4550 // Create pipeline
4551 m_pipeline = (m_resource.getType() == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH
4552 ? de::MovePtr<Pipeline>(new ComputePipeline(context, m_resource.getBuffer().handle, "read_ib_", *m_descriptorSetLayout))
4553 : de::MovePtr<Pipeline>(new GraphicsPipeline(context, m_resource.getType(), m_resource.getBuffer().handle, "read_ib_", *m_descriptorSetLayout)));
4554 }
4555
4556 void recordCommands (const VkCommandBuffer cmdBuffer)
4557 {
4558 const DeviceInterface& vk = m_context.getDeviceInterface();
4559 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
4560
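// Execute the indirect draw/dispatch that reads the indirect buffer; the shader counts its invocations in the host buffer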
4561 m_pipeline->recordCommands(m_context, cmdBuffer, *m_descriptorSet);
4562
4563 // Insert a barrier so data written by the shader is available to the host
4564 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
4565 m_pipelineStage, // VkPipelineStageFlags2KHR srcStageMask
4566 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
4567 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4568 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4569 **m_hostBuffer, // VkBuffer buffer
4570 0u, // VkDeviceSize offset
4571 m_hostBufferSizeBytes // VkDeviceSize size
4572 );
4573 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
4574 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4575 }
4576
4577 SyncInfo getInSyncInfo (void) const
4578 {
4579 const SyncInfo syncInfo =
4580 {
4581 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR, // VkPipelineStageFlags stageMask;
4582 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR, // VkAccessFlags accessMask;
4583 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
4584 };
4585 return syncInfo;
4586 }
4587
4588 SyncInfo getOutSyncInfo (void) const
4589 {
4590 return emptySyncInfo;
4591 }
4592
4593 Data getData (void) const
4594 {
4595 return getHostBufferData(m_context, *m_hostBuffer, m_hostBufferSizeBytes);
4596 }
4597
4598 void setData (const Data&)
4599 {
4600 DE_ASSERT(0);
4601 }
4602
4603 private:
4604 OperationContext& m_context;
4605 Resource& m_resource;
4606 const VkShaderStageFlagBits m_stage;
4607 const VkPipelineStageFlags m_pipelineStage;
4608 const VkDeviceSize m_hostBufferSizeBytes;
4609 de::MovePtr<Buffer> m_hostBuffer;
4610 Move<VkDescriptorPool> m_descriptorPool;
4611 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
4612 Move<VkDescriptorSet> m_descriptorSet;
4613 de::MovePtr<Pipeline> m_pipeline;
4614 };
4615
4616 //! Prepare indirect buffer for a draw/dispatch call.
4617 class WriteImplementation : public Operation
4618 {
4619 public:
4620 WriteImplementation (OperationContext& context, Resource& resource)
4621 : m_context (context)
4622 , m_resource (resource)
4623 {
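// Prepare the indirect command and the value the read operation is expected to accumulate
// (6 vertices per draw, 7*2*1 = 14 workgroups per dispatch)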
4624 switch (m_resource.getType())
4625 {
4626 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW:
4627 {
4628 m_drawIndirect.vertexCount = 6u;
4629 m_drawIndirect.instanceCount = 1u;
4630 m_drawIndirect.firstVertex = 0u;
4631 m_drawIndirect.firstInstance = 0u;
4632
4633 m_indirectData = reinterpret_cast<deUint32*>(&m_drawIndirect);
4634 m_expectedValue = 6u;
4635 }
4636 break;
4637
4638 case RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED:
4639 {
4640 m_drawIndexedIndirect.indexCount = 6u;
4641 m_drawIndexedIndirect.instanceCount = 1u;
4642 m_drawIndexedIndirect.firstIndex = 0u;
4643 m_drawIndexedIndirect.vertexOffset = 0u;
4644 m_drawIndexedIndirect.firstInstance = 0u;
4645
4646 m_indirectData = reinterpret_cast<deUint32*>(&m_drawIndexedIndirect);
4647 m_expectedValue = 6u;
4648 }
4649 break;
4650
4651 case RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH:
4652 {
4653 m_dispatchIndirect.x = 7u;
4654 m_dispatchIndirect.y = 2u;
4655 m_dispatchIndirect.z = 1u;
4656
4657 m_indirectData = reinterpret_cast<deUint32*>(&m_dispatchIndirect);
4658 m_expectedValue = 14u;
4659 }
4660 break;
4661
4662 default:
4663 DE_ASSERT(0);
4664 break;
4665 }
4666 }
4667
4668 void recordCommands (const VkCommandBuffer cmdBuffer)
4669 {
4670 const DeviceInterface& vk = m_context.getDeviceInterface();
4671
4672 vk.cmdUpdateBuffer(cmdBuffer, m_resource.getBuffer().handle, m_resource.getBuffer().offset, m_resource.getBuffer().size, m_indirectData);
4673 }
4674
4675 SyncInfo getInSyncInfo (void) const
4676 {
4677 return emptySyncInfo;
4678 }
4679
4680 SyncInfo getOutSyncInfo (void) const
4681 {
4682 const SyncInfo syncInfo =
4683 {
4684 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR, // VkPipelineStageFlags stageMask;
4685 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR, // VkAccessFlags accessMask;
4686 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
4687 };
4688 return syncInfo;
4689 }
4690
4691 Data getData (void) const
4692 {
4693 const Data data =
4694 {
4695 sizeof(deUint32), // std::size_t size;
4696 reinterpret_cast<const deUint8*>(&m_expectedValue), // const deUint8* data;
4697 };
4698 return data;
4699 }
4700
4701 void setData (const Data&)
4702 {
4703 DE_ASSERT(0);
4704 }
4705
4706 private:
4707 OperationContext& m_context;
4708 Resource& m_resource;
4709 VkDrawIndirectCommand m_drawIndirect;
4710 VkDrawIndexedIndirectCommand m_drawIndexedIndirect;
4711 VkDispatchIndirectCommand m_dispatchIndirect;
4712 deUint32* m_indirectData;
4713 deUint32 m_expectedValue; //! Side-effect value expected to be computed by a read (draw/dispatch) command.
4714 };
4715
4716 class ReadSupport : public OperationSupport
4717 {
4718 public:
4719 ReadSupport (const ResourceDescription& resourceDesc)
4720 : m_resourceDesc (resourceDesc)
4721 {
4722 DE_ASSERT(isIndirectBuffer(m_resourceDesc.type));
4723 }
4724
4725 void initPrograms (SourceCollections& programCollection) const
4726 {
4727 std::ostringstream decl;
4728 decl << "layout(set = 0, binding = 0, std140) coherent buffer Data {\n"
4729 << " uint value;\n"
4730 << "} sb_out;\n";
4731
4732 std::ostringstream main;
4733 main << " atomicAdd(sb_out.value, 1u);\n";
4734
4735 // Vertex
4736 {
4737 std::ostringstream src;
4738 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4739 << "\n"
4740 << "layout(location = 0) in vec4 v_in_position;\n"
4741 << "\n"
4742 << "out " << s_perVertexBlock << ";\n"
4743 << "\n"
4744 << decl.str()
4745 << "\n"
4746 << "void main (void)\n"
4747 << "{\n"
4748 << " gl_Position = v_in_position;\n"
4749 << main.str()
4750 << "}\n";
4751
4752 programCollection.glslSources.add("read_ib_vert") << glu::VertexSource(src.str());
4753 }
4754
4755 // Fragment
4756 {
4757 std::ostringstream src;
4758 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4759 << "\n"
4760 << "layout(location = 0) out vec4 o_color;\n"
4761 << "\n"
4762 << "void main (void)\n"
4763 << "{\n"
4764 << " o_color = vec4(1.0);\n"
4765 << "}\n";
4766
4767 programCollection.glslSources.add("read_ib_frag") << glu::FragmentSource(src.str());
4768 }
4769
4770 // Compute
4771 {
4772 std::ostringstream src;
4773 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
4774 << "\n"
4775 << "layout(local_size_x = 1) in;\n"
4776 << "\n"
4777 << decl.str()
4778 << "\n"
4779 << "void main (void)\n"
4780 << "{\n"
4781 << main.str()
4782 << "}\n";
4783
4784 programCollection.glslSources.add("read_ib_comp") << glu::ComputeSource(src.str());
4785 }
4786 }
4787
4788 deUint32 getInResourceUsageFlags (void) const
4789 {
4790 return VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
4791 }
4792
4793 deUint32 getOutResourceUsageFlags (void) const
4794 {
4795 return 0;
4796 }
4797
4798 VkQueueFlags getQueueFlags (const OperationContext& context) const
4799 {
4800 DE_UNREF(context);
4801 return (m_resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH ? VK_QUEUE_COMPUTE_BIT : VK_QUEUE_GRAPHICS_BIT);
4802 }
4803
4804 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
4805 {
4806 return de::MovePtr<Operation>(new ReadImplementation(context, resource));
4807 }
4808
4809 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
4810 {
4811 DE_ASSERT(0);
4812 return de::MovePtr<Operation>();
4813 }
4814
4815 private:
4816 const ResourceDescription m_resourceDesc;
4817 };
4818
4819
4820 class WriteSupport : public OperationSupport
4821 {
4822 public:
4823 WriteSupport (const ResourceDescription& resourceDesc)
4824 {
4825 DE_ASSERT(isIndirectBuffer(resourceDesc.type));
4826 DE_UNREF(resourceDesc);
4827 }
4828
4829 deUint32 getInResourceUsageFlags (void) const
4830 {
4831 return 0;
4832 }
4833
4834 deUint32 getOutResourceUsageFlags (void) const
4835 {
4836 return VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4837 }
4838
4839 VkQueueFlags getQueueFlags (const OperationContext& context) const
4840 {
4841 DE_UNREF(context);
4842 return VK_QUEUE_TRANSFER_BIT;
4843 }
4844
4845 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
4846 {
4847 return de::MovePtr<Operation>(new WriteImplementation(context, resource));
4848 }
4849
4850 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
4851 {
4852 DE_ASSERT(0);
4853 return de::MovePtr<Operation>();
4854 }
4855 };
4856
4857 } // IndirectBuffer ns
4858
4859 namespace VertexInput
4860 {
4861
4862 enum DrawMode
4863 {
4864 DRAW_MODE_VERTEX = 0,
4865 DRAW_MODE_INDEXED,
4866 };
4867
4868 class Implementation : public Operation
4869 {
4870 public:
4871 Implementation (OperationContext& context, Resource& resource, DrawMode drawMode)
4872 : m_context (context)
4873 , m_resource (resource)
4874 , m_drawMode (drawMode)
4875 {
4876 requireFeaturesForSSBOAccess (m_context, VK_SHADER_STAGE_VERTEX_BIT);
4877
4878 const DeviceInterface& vk = context.getDeviceInterface();
4879 const VkDevice device = context.getDevice();
4880 Allocator& allocator = context.getAllocator();
4881 VkFormat attributeFormat = VK_FORMAT_R32G32B32A32_UINT;
4882 const VkDeviceSize dataSizeBytes = m_resource.getBuffer().size;
4883
4884 // Allocate an SSBO that will store the data used for verification
4885 {
4886 m_outputBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
4887 makeBufferCreateInfo(dataSizeBytes, VK_BUFFER_USAGE_STORAGE_BUFFER_BIT), MemoryRequirement::HostVisible));
4888
4889 const Allocation& alloc = m_outputBuffer->getAllocation();
4890 deMemset(alloc.getHostPtr(), 0, static_cast<size_t>(dataSizeBytes));
4891 flushAlloc(vk, device, alloc);
4892 }
4893
4894 // Allocate a buffer that will provide vertex attributes when the resource is used as the index buffer
4895 if (m_drawMode == DRAW_MODE_INDEXED)
4896 {
4897 attributeFormat = VK_FORMAT_R32_UINT;
4898
4899 m_inputBuffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator,
4900 makeBufferCreateInfo(dataSizeBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
4901
4902 const Allocation& alloc = m_inputBuffer->getAllocation();
4903 fillPattern(alloc.getHostPtr(), dataSizeBytes, true);
4904 flushAlloc(vk, device, alloc);
4905 }
4906
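// Prepare descriptors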
4907 m_descriptorSetLayout = DescriptorSetLayoutBuilder()
4908 .addSingleBinding (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, VK_SHADER_STAGE_VERTEX_BIT)
4909 .build (vk, device);
4910
4911 m_descriptorPool = DescriptorPoolBuilder()
4912 .addType (VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
4913 .build (vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u);
4914
4915 m_descriptorSet = makeDescriptorSet(vk, device, *m_descriptorPool, *m_descriptorSetLayout);
4916
4917 const VkDescriptorBufferInfo outputBufferDescriptorInfo = makeDescriptorBufferInfo(m_outputBuffer->get(), 0ull, dataSizeBytes);
4918 DescriptorSetUpdateBuilder()
4919 .writeSingle(*m_descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_STORAGE_BUFFER, &outputBufferDescriptorInfo)
4920 .update (vk, device);
4921
4922 // Color attachment
4923 m_colorFormat = VK_FORMAT_R8G8B8A8_UNORM;
4924 m_colorImageSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u);
4925 m_colorImageExtent = makeExtent3D(16u, 16u, 1u);
4926 m_colorAttachmentImage = de::MovePtr<Image>(new Image(vk, device, allocator,
4927 makeImageCreateInfo(VK_IMAGE_TYPE_2D, m_colorImageExtent, m_colorFormat, VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT),
4928 MemoryRequirement::Any));
4929
4930 // Pipeline
4931 m_colorAttachmentView = makeImageView (vk, device, **m_colorAttachmentImage, VK_IMAGE_VIEW_TYPE_2D, m_colorFormat, m_colorImageSubresourceRange);
4932 m_renderPass = makeRenderPass (vk, device, m_colorFormat);
4933 m_framebuffer = makeFramebuffer (vk, device, *m_renderPass, *m_colorAttachmentView, m_colorImageExtent.width, m_colorImageExtent.height);
4934 m_pipelineLayout = makePipelineLayout(vk, device, *m_descriptorSetLayout);
4935
4936 m_pipeline = GraphicsPipelineBuilder()
4937 .setPrimitiveTopology (VK_PRIMITIVE_TOPOLOGY_POINT_LIST)
4938 .setRenderSize (tcu::IVec2(static_cast<int>(m_colorImageExtent.width), static_cast<int>(m_colorImageExtent.height)))
4939 .setVertexInputSingleAttribute (attributeFormat, tcu::getPixelSize(mapVkFormat(attributeFormat)))
4940 .setShader (vk, device, VK_SHADER_STAGE_VERTEX_BIT, context.getBinaryCollection().get("input_vert"), DE_NULL)
4941 .setShader (vk, device, VK_SHADER_STAGE_FRAGMENT_BIT, context.getBinaryCollection().get("input_frag"), DE_NULL)
4942 .build (vk, device, *m_pipelineLayout, *m_renderPass, context.getPipelineCacheData());
4943 }
4944
4945 void recordCommands (const VkCommandBuffer cmdBuffer)
4946 {
4947 const DeviceInterface& vk = m_context.getDeviceInterface();
4948 const VkDeviceSize dataSizeBytes = m_resource.getBuffer().size;
4949 SynchronizationWrapperPtr synchronizationWrapper = getSynchronizationWrapper(m_context.getSynchronizationType(), vk, DE_FALSE);
4950
4951 // Change color attachment image layout
4952 {
4953 const VkImageMemoryBarrier2KHR imageMemoryBarrier2 = makeImageMemoryBarrier2(
4954 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4955 (VkAccessFlags)0, // VkAccessFlags2KHR srcAccessMask
4956 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
4957 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
4958 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout oldLayout
4959 VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, // VkImageLayout newLayout
4960 **m_colorAttachmentImage, // VkImage image
4961 m_colorImageSubresourceRange // VkImageSubresourceRange subresourceRange
4962 );
4963 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, DE_NULL, &imageMemoryBarrier2);
4964 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
4965 }
4966
4967 {
4968 const VkRect2D renderArea = makeRect2D(m_colorImageExtent);
4969 const tcu::Vec4 clearColor = tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f);
4970
4971 beginRenderPass(vk, cmdBuffer, *m_renderPass, *m_framebuffer, renderArea, clearColor);
4972 }
4973
4974 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipeline);
4975 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *m_pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
4976
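// Draw using the resource either as the vertex buffer or as the index buffer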
4977 const VkDeviceSize vertexBufferOffset = 0ull;
4978 if (m_drawMode == DRAW_MODE_VERTEX)
4979 {
4980 const deUint32 count = static_cast<deUint32>(dataSizeBytes / sizeof(tcu::UVec4));
4981 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &m_resource.getBuffer().handle, &vertexBufferOffset);
4982 vk.cmdDraw(cmdBuffer, count, 1u, 0u, 0u);
4983 }
4984 else // (m_drawMode == DRAW_MODE_INDEXED)
4985 {
4986 const deUint32 count = static_cast<deUint32>(dataSizeBytes / sizeof(deUint32));
4987 vk.cmdBindVertexBuffers(cmdBuffer, 0u, 1u, &**m_inputBuffer, &vertexBufferOffset);
4988 vk.cmdBindIndexBuffer(cmdBuffer, m_resource.getBuffer().handle, 0u, VK_INDEX_TYPE_UINT32);
4989 vk.cmdDrawIndexed(cmdBuffer, count, 1, 0, 0, 0);
4990 }
4991
4992 endRenderPass(vk, cmdBuffer);
4993
4994 // Insert a barrier so data written by the shader is available to the host
4995 {
4996 const VkBufferMemoryBarrier2KHR bufferMemoryBarrier2 = makeBufferMemoryBarrier2(
4997 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR, // VkPipelineStageFlags2KHR srcStageMask
4998 VK_ACCESS_2_SHADER_WRITE_BIT_KHR, // VkAccessFlags2KHR srcAccessMask
4999 VK_PIPELINE_STAGE_2_HOST_BIT_KHR, // VkPipelineStageFlags2KHR dstStageMask
5000 VK_ACCESS_2_HOST_READ_BIT_KHR, // VkAccessFlags2KHR dstAccessMask
5001 **m_outputBuffer, // VkBuffer buffer
5002 0u, // VkDeviceSize offset
5003 m_resource.getBuffer().size // VkDeviceSize size
5004 );
5005 VkDependencyInfoKHR dependencyInfo = makeCommonDependencyInfo(DE_NULL, &bufferMemoryBarrier2);
5006 synchronizationWrapper->cmdPipelineBarrier(cmdBuffer, &dependencyInfo);
5007 }
5008 }
5009
5010 SyncInfo getInSyncInfo (void) const
5011 {
5012 const bool usingIndexedDraw = (m_drawMode == DRAW_MODE_INDEXED);
5013 VkPipelineStageFlags2KHR stageMask = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR;
5014 VkAccessFlags2KHR accessMask = usingIndexedDraw ? VK_ACCESS_2_INDEX_READ_BIT_KHR
5015 : VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR;
5016
5017 if (m_context.getSynchronizationType() == SynchronizationType::SYNCHRONIZATION2)
5018 {
5019 // test new stages added with VK_KHR_synchronization2 (no need to further duplicate those tests);
5020 // with this operation we can test the pre_rasterization, index_input and attribute_input flags;
5021 // since this operation is executed for three buffers of different sizes, we use different flags depending on the size
5022 if (m_resource.getBuffer().size > MAX_UPDATE_BUFFER_SIZE)
5023 stageMask = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR;
5024 else
5025 stageMask = usingIndexedDraw ? VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR
5026 : VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR;
5027 }
5028
5029 const SyncInfo syncInfo =
5030 {
5031 stageMask, // VkPipelineStageFlags stageMask;
5032 accessMask, // VkAccessFlags accessMask;
5033 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout imageLayout;
5034 };
5035 return syncInfo;
5036 }
5037
5038 SyncInfo getOutSyncInfo (void) const
5039 {
5040 return emptySyncInfo;
5041 }
5042
5043 Data getData (void) const
5044 {
5045 return getHostBufferData(m_context, *m_outputBuffer, m_resource.getBuffer().size);
5046 }
5047
5048 void setData (const Data& data)
5049 {
5050 setHostBufferData(m_context, *m_outputBuffer, data);
5051 }
5052
5053 private:
5054 OperationContext& m_context;
5055 Resource& m_resource;
5056 DrawMode m_drawMode;
5057 de::MovePtr<Buffer> m_inputBuffer;
5058 de::MovePtr<Buffer> m_outputBuffer;
5059 Move<VkRenderPass> m_renderPass;
5060 Move<VkFramebuffer> m_framebuffer;
5061 Move<VkPipelineLayout> m_pipelineLayout;
5062 Move<VkPipeline> m_pipeline;
5063 VkFormat m_colorFormat;
5064 de::MovePtr<Image> m_colorAttachmentImage;
5065 Move<VkImageView> m_colorAttachmentView;
5066 VkExtent3D m_colorImageExtent;
5067 VkImageSubresourceRange m_colorImageSubresourceRange;
5068 Move<VkDescriptorPool> m_descriptorPool;
5069 Move<VkDescriptorSetLayout> m_descriptorSetLayout;
5070 Move<VkDescriptorSet> m_descriptorSet;
5071 };
5072
5073 class Support : public OperationSupport
5074 {
5075 public:
5076 Support (const ResourceDescription& resourceDesc, DrawMode drawMode)
5077 : m_resourceDesc (resourceDesc)
5078 , m_drawMode (drawMode)
5079 {
5080 DE_ASSERT(m_resourceDesc.type == RESOURCE_TYPE_BUFFER || m_resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER);
5081 }
5082
5083 void initPrograms (SourceCollections& programCollection) const
5084 {
5085 // Vertex
5086 {
5087 std::ostringstream src;
5088 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n";
5089 if (m_drawMode == DRAW_MODE_VERTEX)
5090 {
5091 src << "layout(location = 0) in uvec4 v_in_data;\n"
5092 << "layout(set = 0, binding = 0, std140) writeonly buffer Output {\n"
5093 << " uvec4 data[" << m_resourceDesc.size.x() / sizeof(tcu::UVec4) << "];\n"
5094 << "} b_out;\n"
5095 << "\n"
5096 << "void main (void)\n"
5097 << "{\n"
5098 << " b_out.data[gl_VertexIndex] = v_in_data;\n"
5099 << " gl_PointSize = 1.0f;\n"
5100 << "}\n";
5101 }
5102 else // DRAW_MODE_INDEXED
5103 {
5104 src << "layout(location = 0) in uint v_in_data;\n"
5105 << "layout(set = 0, binding = 0, std430) writeonly buffer Output {\n"
5106 << " uint data[" << m_resourceDesc.size.x() / sizeof(deUint32) << "];\n"
5107 << "} b_out;\n"
5108 << "\n"
5109 << "void main (void)\n"
5110 << "{\n"
5111 << " b_out.data[gl_VertexIndex] = v_in_data;\n"
5112 << " gl_PointSize = 1.0f;\n"
5113 << "}\n";
5114 }
5115 programCollection.glslSources.add("input_vert") << glu::VertexSource(src.str());
5116 }
5117
5118 // Fragment
5119 {
5120 std::ostringstream src;
5121 src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_440) << "\n"
5122 << "\n"
5123 << "layout(location = 0) out vec4 o_color;\n"
5124 << "\n"
5125 << "void main (void)\n"
5126 << "{\n"
5127 << " o_color = vec4(1.0);\n"
5128 << "}\n";
5129 programCollection.glslSources.add("input_frag") << glu::FragmentSource(src.str());
5130 }
5131 }
5132
5133 deUint32 getInResourceUsageFlags (void) const
5134 {
5135 return (m_drawMode == DRAW_MODE_VERTEX) ? VK_BUFFER_USAGE_VERTEX_BUFFER_BIT : VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
5136 }
5137
5138 deUint32 getOutResourceUsageFlags (void) const
5139 {
5140 return VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
5141 }
5142
5143 VkQueueFlags getQueueFlags (const OperationContext&) const
5144 {
5145 return VK_QUEUE_GRAPHICS_BIT;
5146 }
5147
5148 de::MovePtr<Operation> build (OperationContext& context, Resource& resource) const
5149 {
5150 return de::MovePtr<Operation>(new Implementation(context, resource, m_drawMode));
5151 }
5152
5153 de::MovePtr<Operation> build (OperationContext&, Resource&, Resource&) const
5154 {
5155 DE_ASSERT(0);
5156 return de::MovePtr<Operation>();
5157 }
5158
5159 private:
5160 const ResourceDescription m_resourceDesc;
5161 const DrawMode m_drawMode;
5162 };
5163
5164 } // VertexInput
5165
5166 } // anonymous ns
5167
5168 OperationContext::OperationContext (Context& context, SynchronizationType syncType, PipelineCacheData& pipelineCacheData)
5169 : m_context (context)
5170 , m_syncType (syncType)
5171 , m_vki (context.getInstanceInterface())
5172 , m_vk (context.getDeviceInterface())
5173 , m_physicalDevice (context.getPhysicalDevice())
5174 , m_device (context.getDevice())
5175 , m_allocator (context.getDefaultAllocator())
5176 , m_progCollection (context.getBinaryCollection())
5177 , m_pipelineCacheData (pipelineCacheData)
5178 {
5179 }
5180
5181 OperationContext::OperationContext (Context& context,
5182 SynchronizationType syncType,
5183 const DeviceInterface& vk,
5184 const VkDevice device,
5185 vk::Allocator& allocator,
5186 PipelineCacheData& pipelineCacheData)
5187 : m_context (context)
5188 , m_syncType (syncType)
5189 , m_vki (context.getInstanceInterface())
5190 , m_vk (vk)
5191 , m_physicalDevice (context.getPhysicalDevice())
5192 , m_device (device)
5193 , m_allocator (allocator)
5194 , m_progCollection (context.getBinaryCollection())
5195 , m_pipelineCacheData (pipelineCacheData)
5196 {
5197 }
5198
5199 OperationContext::OperationContext (Context& context,
5200 SynchronizationType syncType,
5201 const vk::InstanceInterface& vki,
5202 const vk::DeviceInterface& vkd,
5203 vk::VkPhysicalDevice physicalDevice,
5204 vk::VkDevice device,
5205 vk::Allocator& allocator,
5206 vk::BinaryCollection& programCollection,
5207 PipelineCacheData& pipelineCacheData)
5208 : m_context (context)
5209 , m_syncType (syncType)
5210 , m_vki (vki)
5211 , m_vk (vkd)
5212 , m_physicalDevice (physicalDevice)
5213 , m_device (device)
5214 , m_allocator (allocator)
5215 , m_progCollection (programCollection)
5216 , m_pipelineCacheData (pipelineCacheData)
5217 {
5218 }
5219
5220 Resource::Resource (OperationContext& context, const ResourceDescription& desc, const deUint32 usage, const vk::VkSharingMode sharingMode, const std::vector<deUint32>& queueFamilyIndex)
5221 : m_type (desc.type)
5222 {
5223 const DeviceInterface& vk = context.getDeviceInterface();
5224 const InstanceInterface& vki = context.getInstanceInterface();
5225 const VkDevice device = context.getDevice();
5226 const VkPhysicalDevice physDevice = context.getPhysicalDevice();
5227 Allocator& allocator = context.getAllocator();
5228
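// Create either a buffer or an image resource, depending on the requested resource type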
5229 if (m_type == RESOURCE_TYPE_BUFFER || m_type == RESOURCE_TYPE_INDEX_BUFFER || isIndirectBuffer(m_type))
5230 {
5231 m_bufferData.offset = 0u;
5232 m_bufferData.size = static_cast<VkDeviceSize>(desc.size.x());
5233 VkBufferCreateInfo bufferCreateInfo = makeBufferCreateInfo(m_bufferData.size, usage);
5234 bufferCreateInfo.sharingMode = sharingMode;
5235 if (queueFamilyIndex.size() > 0)
5236 {
5237 bufferCreateInfo.queueFamilyIndexCount = static_cast<deUint32>(queueFamilyIndex.size());
5238 bufferCreateInfo.pQueueFamilyIndices = &queueFamilyIndex[0];
5239 }
5240 m_buffer = de::MovePtr<Buffer>(new Buffer(vk, device, allocator, bufferCreateInfo, MemoryRequirement::Any));
5241 m_bufferData.handle = **m_buffer;
5242 }
5243 else if (m_type == RESOURCE_TYPE_IMAGE)
5244 {
5245 m_imageData.extent = makeExtent3D(desc.size.x(), std::max(1, desc.size.y()), std::max(1, desc.size.z()));
5246 m_imageData.imageType = desc.imageType;
5247 m_imageData.format = desc.imageFormat;
5248 m_imageData.subresourceRange = makeImageSubresourceRange(desc.imageAspect, 0u, 1u, 0u, 1u);
5249 m_imageData.subresourceLayers = makeImageSubresourceLayers(desc.imageAspect, 0u, 0u, 1u);
5250 VkImageCreateInfo imageInfo = makeImageCreateInfo(m_imageData.imageType, m_imageData.extent, m_imageData.format, usage, desc.imageSamples);
5251 imageInfo.sharingMode = sharingMode;
5252 if (queueFamilyIndex.size() > 0)
5253 {
5254 imageInfo.queueFamilyIndexCount = static_cast<deUint32>(queueFamilyIndex.size());
5255 imageInfo.pQueueFamilyIndices = &queueFamilyIndex[0];
5256 }
5257
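// Verify that the requested format, usage and sample count are supported before creating the image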
5258 VkImageFormatProperties imageFormatProperties;
5259 const VkResult formatResult = vki.getPhysicalDeviceImageFormatProperties(physDevice, imageInfo.format, imageInfo.imageType, imageInfo.tiling, imageInfo.usage, imageInfo.flags, &imageFormatProperties);
5260
5261 if (formatResult != VK_SUCCESS)
5262 TCU_THROW(NotSupportedError, "Image format is not supported");
5263
5264 if ((imageFormatProperties.sampleCounts & desc.imageSamples) != desc.imageSamples)
5265 TCU_THROW(NotSupportedError, "Requested sample count is not supported");
5266
5267 m_image = de::MovePtr<Image>(new Image(vk, device, allocator, imageInfo, MemoryRequirement::Any));
5268 m_imageData.handle = **m_image;
5269 }
5270 else
5271 DE_ASSERT(0);
5272 }
5273
5274 Resource::Resource (ResourceType type,
5275 vk::Move<vk::VkBuffer> buffer,
5276 de::MovePtr<vk::Allocation> allocation,
5277 vk::VkDeviceSize offset,
5278 vk::VkDeviceSize size)
5279 : m_type (type)
5280 , m_buffer (new Buffer(buffer, allocation))
5281 {
5282 DE_ASSERT(type != RESOURCE_TYPE_IMAGE);
5283
5284 m_bufferData.handle = m_buffer->get();
5285 m_bufferData.offset = offset;
5286 m_bufferData.size = size;
5287 }
5288
5289 Resource::Resource (vk::Move<vk::VkImage> image,
5290 de::MovePtr<vk::Allocation> allocation,
5291 const vk::VkExtent3D& extent,
5292 vk::VkImageType imageType,
5293 vk::VkFormat format,
5294 vk::VkImageSubresourceRange subresourceRange,
5295 vk::VkImageSubresourceLayers subresourceLayers)
5296 : m_type (RESOURCE_TYPE_IMAGE)
5297 , m_image (new Image(image, allocation))
5298 {
5299 m_imageData.handle = m_image->get();
5300 m_imageData.extent = extent;
5301 m_imageData.imageType = imageType;
5302 m_imageData.format = format;
5303 m_imageData.subresourceRange = subresourceRange;
5304 m_imageData.subresourceLayers = subresourceLayers;
5305 }
5306
5307 vk::VkDeviceMemory Resource::getMemory (void) const
5308 {
5309 if (m_type == RESOURCE_TYPE_IMAGE)
5310 return m_image->getAllocation().getMemory();
5311 else
5312 return m_buffer->getAllocation().getMemory();
5313 }
5314
5315 //! \note This function exists for performance reasons. We're creating a lot of tests, and checking requirements here,
5316 //! before an OperationSupport object is created, is faster.
5317 bool isResourceSupported (const OperationName opName, const ResourceDescription& resourceDesc)
5318 {
5319 switch (opName)
5320 {
5321 case OPERATION_NAME_WRITE_FILL_BUFFER:
5322 case OPERATION_NAME_WRITE_COPY_BUFFER:
5323 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER:
5324 case OPERATION_NAME_WRITE_SSBO_VERTEX:
5325 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL:
5326 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION:
5327 case OPERATION_NAME_WRITE_SSBO_GEOMETRY:
5328 case OPERATION_NAME_WRITE_SSBO_FRAGMENT:
5329 case OPERATION_NAME_WRITE_SSBO_COMPUTE:
5330 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT:
5331 case OPERATION_NAME_READ_COPY_BUFFER:
5332 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE:
5333 case OPERATION_NAME_READ_SSBO_VERTEX:
5334 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL:
5335 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION:
5336 case OPERATION_NAME_READ_SSBO_GEOMETRY:
5337 case OPERATION_NAME_READ_SSBO_FRAGMENT:
5338 case OPERATION_NAME_READ_SSBO_COMPUTE:
5339 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT:
5340 case OPERATION_NAME_READ_VERTEX_INPUT:
5341 return resourceDesc.type == RESOURCE_TYPE_BUFFER;
5342
5343 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW:
5344 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW:
5345 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW;
5346
5347 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED:
5348 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED:
5349 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DRAW_INDEXED;
5350
5351 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH:
5352 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH:
5353 return resourceDesc.type == RESOURCE_TYPE_INDIRECT_BUFFER_DISPATCH;
5354
5355 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER:
5356 case OPERATION_NAME_READ_INDEX_INPUT:
5357 return resourceDesc.type == RESOURCE_TYPE_INDEX_BUFFER;
5358
5359 case OPERATION_NAME_WRITE_UPDATE_BUFFER:
5360 return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UPDATE_BUFFER_SIZE;
5361
5362 case OPERATION_NAME_WRITE_COPY_IMAGE:
5363 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE:
5364 case OPERATION_NAME_READ_COPY_IMAGE:
5365 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER:
5366 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5367
5368 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS:
5369 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType != VK_IMAGE_TYPE_3D
5370 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5371
5372 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE:
5373 case OPERATION_NAME_READ_RESOLVE_IMAGE:
5374 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT
5375 && resourceDesc.imageSamples != VK_SAMPLE_COUNT_1_BIT;
5376
5377 case OPERATION_NAME_WRITE_BLIT_IMAGE:
5378 case OPERATION_NAME_READ_BLIT_IMAGE:
5379 case OPERATION_NAME_WRITE_IMAGE_VERTEX:
5380 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL:
5381 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION:
5382 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY:
5383 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT:
5384 case OPERATION_NAME_WRITE_IMAGE_COMPUTE:
5385 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT:
5386 case OPERATION_NAME_READ_IMAGE_VERTEX:
5387 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL:
5388 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION:
5389 case OPERATION_NAME_READ_IMAGE_GEOMETRY:
5390 case OPERATION_NAME_READ_IMAGE_FRAGMENT:
5391 case OPERATION_NAME_READ_IMAGE_COMPUTE:
5392 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT:
5393 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT
5394 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5395
5396 case OPERATION_NAME_READ_UBO_VERTEX:
5397 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL:
5398 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION:
5399 case OPERATION_NAME_READ_UBO_GEOMETRY:
5400 case OPERATION_NAME_READ_UBO_FRAGMENT:
5401 case OPERATION_NAME_READ_UBO_COMPUTE:
5402 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT:
5403 return resourceDesc.type == RESOURCE_TYPE_BUFFER && resourceDesc.size.x() <= MAX_UBO_RANGE;
5404
5405 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE:
5406 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT
5407 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5408
5409 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE:
5410 return resourceDesc.type == RESOURCE_TYPE_IMAGE && (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT))
5411 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5412
5413 case OPERATION_NAME_WRITE_DRAW:
5414 case OPERATION_NAME_WRITE_DRAW_INDEXED:
5415 case OPERATION_NAME_WRITE_DRAW_INDIRECT:
5416 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT:
5417 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageType == VK_IMAGE_TYPE_2D
5418 && (resourceDesc.imageAspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) == 0
5419 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5420
5421 case OPERATION_NAME_COPY_BUFFER:
5422 case OPERATION_NAME_COPY_SSBO_VERTEX:
5423 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL:
5424 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION:
5425 case OPERATION_NAME_COPY_SSBO_GEOMETRY:
5426 case OPERATION_NAME_COPY_SSBO_FRAGMENT:
5427 case OPERATION_NAME_COPY_SSBO_COMPUTE:
5428 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT:
5429 return resourceDesc.type == RESOURCE_TYPE_BUFFER;
5430
5431 case OPERATION_NAME_COPY_IMAGE:
5432 case OPERATION_NAME_BLIT_IMAGE:
5433 case OPERATION_NAME_COPY_IMAGE_VERTEX:
5434 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL:
5435 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION:
5436 case OPERATION_NAME_COPY_IMAGE_GEOMETRY:
5437 case OPERATION_NAME_COPY_IMAGE_FRAGMENT:
5438 case OPERATION_NAME_COPY_IMAGE_COMPUTE:
5439 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT:
5440 return resourceDesc.type == RESOURCE_TYPE_IMAGE && resourceDesc.imageAspect == VK_IMAGE_ASPECT_COLOR_BIT
5441 && resourceDesc.imageSamples == VK_SAMPLE_COUNT_1_BIT;
5442
5443 default:
5444 DE_ASSERT(0);
5445 return false;
5446 }
5447 }
5448
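//! Returns the lower-case identifier for the given operation, used when naming test cases.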
5449 std::string getOperationName (const OperationName opName)
5450 {
5451 switch (opName)
5452 {
5453 case OPERATION_NAME_WRITE_FILL_BUFFER: return "write_fill_buffer";
5454 case OPERATION_NAME_WRITE_UPDATE_BUFFER: return "write_update_buffer";
5455 case OPERATION_NAME_WRITE_COPY_BUFFER: return "write_copy_buffer";
5456 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE: return "write_copy_buffer_to_image";
5457 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER: return "write_copy_image_to_buffer";
5458 case OPERATION_NAME_WRITE_COPY_IMAGE: return "write_copy_image";
5459 case OPERATION_NAME_WRITE_BLIT_IMAGE: return "write_blit_image";
5460 case OPERATION_NAME_WRITE_SSBO_VERTEX: return "write_ssbo_vertex";
5461 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL: return "write_ssbo_tess_control";
5462 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION: return "write_ssbo_tess_eval";
5463 case OPERATION_NAME_WRITE_SSBO_GEOMETRY: return "write_ssbo_geometry";
5464 case OPERATION_NAME_WRITE_SSBO_FRAGMENT: return "write_ssbo_fragment";
5465 case OPERATION_NAME_WRITE_SSBO_COMPUTE: return "write_ssbo_compute";
5466 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT: return "write_ssbo_compute_indirect";
5467 case OPERATION_NAME_WRITE_IMAGE_VERTEX: return "write_image_vertex";
5468 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL: return "write_image_tess_control";
5469 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION: return "write_image_tess_eval";
5470 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY: return "write_image_geometry";
5471 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT: return "write_image_fragment";
5472 case OPERATION_NAME_WRITE_IMAGE_COMPUTE: return "write_image_compute";
5473 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE: return "write_image_compute_multisample";
5474 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT: return "write_image_compute_indirect";
5475 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE: return "write_clear_color_image";
5476 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE: return "write_clear_depth_stencil_image";
5477 case OPERATION_NAME_WRITE_DRAW: return "write_draw";
5478 case OPERATION_NAME_WRITE_DRAW_INDEXED: return "write_draw_indexed";
5479 case OPERATION_NAME_WRITE_DRAW_INDIRECT: return "write_draw_indirect";
5480 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT: return "write_draw_indexed_indirect";
5481 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS: return "write_clear_attachments";
5482 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW: return "write_indirect_buffer_draw";
5483 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED: return "write_indirect_buffer_draw_indexed";
5484 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH: return "write_indirect_buffer_dispatch";
5485 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER: return "write_update_index_buffer";
5486
5487 case OPERATION_NAME_READ_COPY_BUFFER: return "read_copy_buffer";
5488 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE: return "read_copy_buffer_to_image";
5489 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER: return "read_copy_image_to_buffer";
5490 case OPERATION_NAME_READ_COPY_IMAGE: return "read_copy_image";
5491 case OPERATION_NAME_READ_BLIT_IMAGE: return "read_blit_image";
5492 case OPERATION_NAME_READ_RESOLVE_IMAGE: return "read_resolve_image";
5493 case OPERATION_NAME_READ_UBO_VERTEX: return "read_ubo_vertex";
5494 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL: return "read_ubo_tess_control";
5495 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION: return "read_ubo_tess_eval";
5496 case OPERATION_NAME_READ_UBO_GEOMETRY: return "read_ubo_geometry";
5497 case OPERATION_NAME_READ_UBO_FRAGMENT: return "read_ubo_fragment";
5498 case OPERATION_NAME_READ_UBO_COMPUTE: return "read_ubo_compute";
5499 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT: return "read_ubo_compute_indirect";
5500 case OPERATION_NAME_READ_SSBO_VERTEX: return "read_ssbo_vertex";
5501 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL: return "read_ssbo_tess_control";
5502 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION: return "read_ssbo_tess_eval";
5503 case OPERATION_NAME_READ_SSBO_GEOMETRY: return "read_ssbo_geometry";
5504 case OPERATION_NAME_READ_SSBO_FRAGMENT: return "read_ssbo_fragment";
5505 case OPERATION_NAME_READ_SSBO_COMPUTE: return "read_ssbo_compute";
5506 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT: return "read_ssbo_compute_indirect";
5507 case OPERATION_NAME_READ_IMAGE_VERTEX: return "read_image_vertex";
5508 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL: return "read_image_tess_control";
5509 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION: return "read_image_tess_eval";
5510 case OPERATION_NAME_READ_IMAGE_GEOMETRY: return "read_image_geometry";
5511 case OPERATION_NAME_READ_IMAGE_FRAGMENT: return "read_image_fragment";
5512 case OPERATION_NAME_READ_IMAGE_COMPUTE: return "read_image_compute";
5513 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT: return "read_image_compute_indirect";
5514 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW: return "read_indirect_buffer_draw";
5515 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED: return "read_indirect_buffer_draw_indexed";
5516 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH: return "read_indirect_buffer_dispatch";
5517 case OPERATION_NAME_READ_VERTEX_INPUT: return "read_vertex_input";
5518 case OPERATION_NAME_READ_INDEX_INPUT: return "read_index_input";
5519
5520 case OPERATION_NAME_COPY_BUFFER: return "copy_buffer";
5521 case OPERATION_NAME_COPY_IMAGE: return "copy_image";
5522 case OPERATION_NAME_BLIT_IMAGE: return "blit_image";
5523 	case OPERATION_NAME_COPY_SSBO_VERTEX:						return "copy_ssbo_vertex";
5524 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL: return "copy_ssbo_tess_control";
5525 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION: return "copy_ssbo_tess_eval";
5526 case OPERATION_NAME_COPY_SSBO_GEOMETRY: return "copy_ssbo_geometry";
5527 case OPERATION_NAME_COPY_SSBO_FRAGMENT: return "copy_ssbo_fragment";
5528 case OPERATION_NAME_COPY_SSBO_COMPUTE: return "copy_ssbo_compute";
5529 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT: return "copy_ssbo_compute_indirect";
5530 case OPERATION_NAME_COPY_IMAGE_VERTEX: return "copy_image_vertex";
5531 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL: return "copy_image_tess_control";
5532 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION: return "copy_image_tess_eval";
5533 case OPERATION_NAME_COPY_IMAGE_GEOMETRY: return "copy_image_geometry";
5534 case OPERATION_NAME_COPY_IMAGE_FRAGMENT: return "copy_image_fragment";
5535 case OPERATION_NAME_COPY_IMAGE_COMPUTE: return "copy_image_compute";
5536 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT: return "copy_image_compute_indirect";
5537 default:
5538 DE_ASSERT(0);
5539 return "";
5540 }
5541 }
5542
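//! Factory: creates the OperationSupport implementation matching the requested operation
//! and the given resource description.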
5543 de::MovePtr<OperationSupport> makeOperationSupport (const OperationName opName, const ResourceDescription& resourceDesc)
5544 {
5545 switch (opName)
5546 {
5547 case OPERATION_NAME_WRITE_FILL_BUFFER: return de::MovePtr<OperationSupport>(new FillUpdateBuffer ::Support (resourceDesc, FillUpdateBuffer::BUFFER_OP_FILL));
5548 case OPERATION_NAME_WRITE_UPDATE_BUFFER: return de::MovePtr<OperationSupport>(new FillUpdateBuffer ::Support (resourceDesc, FillUpdateBuffer::BUFFER_OP_UPDATE));
5549 case OPERATION_NAME_WRITE_COPY_BUFFER: return de::MovePtr<OperationSupport>(new CopyBuffer ::Support (resourceDesc, ACCESS_MODE_WRITE));
5550 case OPERATION_NAME_WRITE_COPY_BUFFER_TO_IMAGE: return de::MovePtr<OperationSupport>(new CopyBufferToImage ::Support (resourceDesc, ACCESS_MODE_WRITE));
5551 case OPERATION_NAME_WRITE_COPY_IMAGE_TO_BUFFER: return de::MovePtr<OperationSupport>(new CopyImageToBuffer ::Support (resourceDesc, ACCESS_MODE_WRITE));
5552 case OPERATION_NAME_WRITE_COPY_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage ::Support (resourceDesc, CopyBlitResolveImage::TYPE_COPY, ACCESS_MODE_WRITE));
5553 case OPERATION_NAME_WRITE_BLIT_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage ::Support (resourceDesc, CopyBlitResolveImage::TYPE_BLIT, ACCESS_MODE_WRITE));
5554 case OPERATION_NAME_WRITE_SSBO_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_VERTEX_BIT));
5555 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5556 case OPERATION_NAME_WRITE_SSBO_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5557 case OPERATION_NAME_WRITE_SSBO_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_GEOMETRY_BIT));
5558 case OPERATION_NAME_WRITE_SSBO_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_FRAGMENT_BIT));
5559 case OPERATION_NAME_WRITE_SSBO_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_COMPUTE_BIT));
5560 case OPERATION_NAME_WRITE_SSBO_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_WRITE, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5561 case OPERATION_NAME_WRITE_IMAGE_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_VERTEX_BIT));
5562 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5563 case OPERATION_NAME_WRITE_IMAGE_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5564 case OPERATION_NAME_WRITE_IMAGE_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_GEOMETRY_BIT));
5565 case OPERATION_NAME_WRITE_IMAGE_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_FRAGMENT_BIT));
5566 case OPERATION_NAME_WRITE_IMAGE_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_COMPUTE_BIT));
5567 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_WRITE, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5568 case OPERATION_NAME_WRITE_IMAGE_COMPUTE_MULTISAMPLE: return de::MovePtr<OperationSupport>(new ShaderAccess ::MSImageSupport(resourceDesc));
5569 case OPERATION_NAME_WRITE_CLEAR_COLOR_IMAGE: return de::MovePtr<OperationSupport>(new ClearImage ::Support (resourceDesc, ClearImage::CLEAR_MODE_COLOR));
5570 case OPERATION_NAME_WRITE_CLEAR_DEPTH_STENCIL_IMAGE: return de::MovePtr<OperationSupport>(new ClearImage ::Support (resourceDesc, ClearImage::CLEAR_MODE_DEPTH_STENCIL));
5571 case OPERATION_NAME_WRITE_DRAW: return de::MovePtr<OperationSupport>(new Draw ::Support (resourceDesc, Draw::DRAW_CALL_DRAW));
5572 case OPERATION_NAME_WRITE_DRAW_INDEXED: return de::MovePtr<OperationSupport>(new Draw ::Support (resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED));
5573 case OPERATION_NAME_WRITE_DRAW_INDIRECT: return de::MovePtr<OperationSupport>(new Draw ::Support (resourceDesc, Draw::DRAW_CALL_DRAW_INDIRECT));
5574 case OPERATION_NAME_WRITE_DRAW_INDEXED_INDIRECT: return de::MovePtr<OperationSupport>(new Draw ::Support (resourceDesc, Draw::DRAW_CALL_DRAW_INDEXED_INDIRECT));
5575 case OPERATION_NAME_WRITE_CLEAR_ATTACHMENTS: return de::MovePtr<OperationSupport>(new ClearAttachments ::Support (resourceDesc));
5576 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW: return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport (resourceDesc));
5577 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DRAW_INDEXED: return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport (resourceDesc));
5578 case OPERATION_NAME_WRITE_INDIRECT_BUFFER_DISPATCH: return de::MovePtr<OperationSupport>(new IndirectBuffer ::WriteSupport (resourceDesc));
5579 case OPERATION_NAME_WRITE_UPDATE_INDEX_BUFFER: return de::MovePtr<OperationSupport>(new FillUpdateBuffer ::Support (resourceDesc, FillUpdateBuffer::BUFFER_OP_UPDATE_WITH_INDEX_PATTERN));
5580
5581 case OPERATION_NAME_READ_COPY_BUFFER: return de::MovePtr<OperationSupport>(new CopyBuffer ::Support (resourceDesc, ACCESS_MODE_READ));
5582 case OPERATION_NAME_READ_COPY_BUFFER_TO_IMAGE: return de::MovePtr<OperationSupport>(new CopyBufferToImage ::Support (resourceDesc, ACCESS_MODE_READ));
5583 case OPERATION_NAME_READ_COPY_IMAGE_TO_BUFFER: return de::MovePtr<OperationSupport>(new CopyImageToBuffer ::Support (resourceDesc, ACCESS_MODE_READ));
5584 case OPERATION_NAME_READ_COPY_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::Support (resourceDesc, CopyBlitResolveImage::TYPE_COPY, ACCESS_MODE_READ));
5585 case OPERATION_NAME_READ_BLIT_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::Support (resourceDesc, CopyBlitResolveImage::TYPE_BLIT, ACCESS_MODE_READ));
5586 case OPERATION_NAME_READ_RESOLVE_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::Support (resourceDesc, CopyBlitResolveImage::TYPE_RESOLVE, ACCESS_MODE_READ));
5587 case OPERATION_NAME_READ_UBO_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_VERTEX_BIT));
5588 case OPERATION_NAME_READ_UBO_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5589 case OPERATION_NAME_READ_UBO_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5590 case OPERATION_NAME_READ_UBO_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_GEOMETRY_BIT));
5591 case OPERATION_NAME_READ_UBO_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_FRAGMENT_BIT));
5592 case OPERATION_NAME_READ_UBO_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT));
5593 case OPERATION_NAME_READ_UBO_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_UNIFORM, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5594 case OPERATION_NAME_READ_SSBO_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_VERTEX_BIT));
5595 case OPERATION_NAME_READ_SSBO_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5596 case OPERATION_NAME_READ_SSBO_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5597 case OPERATION_NAME_READ_SSBO_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_GEOMETRY_BIT));
5598 case OPERATION_NAME_READ_SSBO_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_FRAGMENT_BIT));
5599 case OPERATION_NAME_READ_SSBO_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT));
5600 case OPERATION_NAME_READ_SSBO_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::BufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5601 case OPERATION_NAME_READ_IMAGE_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_VERTEX_BIT));
5602 case OPERATION_NAME_READ_IMAGE_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5603 case OPERATION_NAME_READ_IMAGE_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5604 case OPERATION_NAME_READ_IMAGE_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_GEOMETRY_BIT));
5605 case OPERATION_NAME_READ_IMAGE_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_FRAGMENT_BIT));
5606 case OPERATION_NAME_READ_IMAGE_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT));
5607 case OPERATION_NAME_READ_IMAGE_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::ImageSupport (resourceDesc, ACCESS_MODE_READ, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5608 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW: return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport (resourceDesc));
5609 case OPERATION_NAME_READ_INDIRECT_BUFFER_DRAW_INDEXED: return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport (resourceDesc));
5610 case OPERATION_NAME_READ_INDIRECT_BUFFER_DISPATCH: return de::MovePtr<OperationSupport>(new IndirectBuffer ::ReadSupport (resourceDesc));
5611 case OPERATION_NAME_READ_VERTEX_INPUT: return de::MovePtr<OperationSupport>(new VertexInput ::Support (resourceDesc, VertexInput::DRAW_MODE_VERTEX));
5612 case OPERATION_NAME_READ_INDEX_INPUT: return de::MovePtr<OperationSupport>(new VertexInput ::Support (resourceDesc, VertexInput::DRAW_MODE_INDEXED));
5613
5614 case OPERATION_NAME_COPY_BUFFER: return de::MovePtr<OperationSupport>(new CopyBuffer ::CopySupport (resourceDesc));
5615 case OPERATION_NAME_COPY_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::CopySupport (resourceDesc, CopyBlitResolveImage::TYPE_COPY));
5616 case OPERATION_NAME_BLIT_IMAGE: return de::MovePtr<OperationSupport>(new CopyBlitResolveImage::CopySupport (resourceDesc, CopyBlitResolveImage::TYPE_BLIT));
5617 case OPERATION_NAME_COPY_SSBO_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, VK_SHADER_STAGE_VERTEX_BIT));
5618 case OPERATION_NAME_COPY_SSBO_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5619 case OPERATION_NAME_COPY_SSBO_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5620 case OPERATION_NAME_COPY_SSBO_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, VK_SHADER_STAGE_GEOMETRY_BIT));
5621 case OPERATION_NAME_COPY_SSBO_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, VK_SHADER_STAGE_FRAGMENT_BIT));
5622 case OPERATION_NAME_COPY_SSBO_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, VK_SHADER_STAGE_COMPUTE_BIT));
5623 case OPERATION_NAME_COPY_SSBO_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyBufferSupport (resourceDesc, BUFFER_TYPE_STORAGE, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5624 case OPERATION_NAME_COPY_IMAGE_VERTEX: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_VERTEX_BIT));
5625 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_CONTROL: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT));
5626 case OPERATION_NAME_COPY_IMAGE_TESSELLATION_EVALUATION: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT));
5627 case OPERATION_NAME_COPY_IMAGE_GEOMETRY: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_GEOMETRY_BIT));
5628 case OPERATION_NAME_COPY_IMAGE_FRAGMENT: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_FRAGMENT_BIT));
5629 case OPERATION_NAME_COPY_IMAGE_COMPUTE: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_COMPUTE_BIT));
5630 case OPERATION_NAME_COPY_IMAGE_COMPUTE_INDIRECT: return de::MovePtr<OperationSupport>(new ShaderAccess ::CopyImageSupport (resourceDesc, VK_SHADER_STAGE_COMPUTE_BIT, ShaderAccess::DISPATCH_CALL_DISPATCH_INDIRECT));
5631
5632 default:
5633 DE_ASSERT(0);
5634 return de::MovePtr<OperationSupport>();
5635 }
5636 }
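
// A minimal usage sketch (illustrative only). The opName/resourceDesc variables and the
// isResourceSupported() guard are assumed to mirror how the test group builders consume
// these helpers; they are not shown in this excerpt.
//
//     if (isResourceSupported(opName, resourceDesc))                      // hypothetical caller-side check
//     {
//         de::MovePtr<OperationSupport> support  = makeOperationSupport(opName, resourceDesc);
//         const std::string             baseName = getOperationName(opName);
//         // ... register a test case named after baseName and build the actual Operation
//         //     from 'support' once a device context and resource are available.
//     }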
5637
5638 } // synchronization
5639 } // vkt
5640