1 /*-------------------------------------------------------------------------
2  * Vulkan Conformance Tests
3  * ------------------------
4  *
5  * Copyright (c) 2015 Google Inc.
6  *
7  * Licensed under the Apache License, Version 2.0 (the "License");
8  * you may not use this file except in compliance with the License.
9  * You may obtain a copy of the License at
10  *
11  *      http://www.apache.org/licenses/LICENSE-2.0
12  *
13  * Unless required by applicable law or agreed to in writing, software
14  * distributed under the License is distributed on an "AS IS" BASIS,
15  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16  * See the License for the specific language governing permissions and
17  * limitations under the License.
18  *
19  *//*!
20  * \file
21  * \brief Pipeline barrier tests
22  *//*--------------------------------------------------------------------*/
23 
24 #include "vktMemoryPipelineBarrierTests.hpp"
25 
26 #include "vktTestCaseUtil.hpp"
27 
28 #include "vkDefs.hpp"
29 #include "vkPlatform.hpp"
30 #include "vkRefUtil.hpp"
31 #include "vkQueryUtil.hpp"
32 #include "vkMemUtil.hpp"
33 #include "vkTypeUtil.hpp"
34 #include "vkPrograms.hpp"
35 #include "vkCmdUtil.hpp"
36 #include "vkObjUtil.hpp"
37 
38 #include "tcuMaybe.hpp"
39 #include "tcuTextureUtil.hpp"
40 #include "tcuTestLog.hpp"
41 #include "tcuResultCollector.hpp"
42 #include "tcuTexture.hpp"
43 #include "tcuImageCompare.hpp"
44 
45 #include "deUniquePtr.hpp"
46 #include "deStringUtil.hpp"
47 #include "deRandom.hpp"
48 
49 #include "deInt32.h"
50 #include "deMath.h"
51 #include "deMemory.h"
52 
53 #include <map>
54 #include <set>
55 #include <sstream>
56 #include <string>
57 #include <vector>
58 
59 using tcu::TestLog;
60 using tcu::Maybe;
61 
62 using de::MovePtr;
63 
64 using std::string;
65 using std::vector;
66 using std::map;
67 using std::set;
68 using std::pair;
69 
70 using tcu::IVec2;
71 using tcu::UVec2;
72 using tcu::UVec4;
73 using tcu::Vec4;
74 using tcu::ConstPixelBufferAccess;
75 using tcu::PixelBufferAccess;
76 using tcu::TextureFormat;
77 using tcu::TextureLevel;
78 
79 namespace vkt
80 {
81 namespace memory
82 {
83 namespace
84 {
85 
86 #define ONE_MEGABYTE 1024*1024
87 enum
88 {
89 	MAX_UNIFORM_BUFFER_SIZE = 1024,
90 	MAX_STORAGE_BUFFER_SIZE = (1<<28),
91 	MAX_SIZE = (128 * 1024)
92 };
93 
94 // \todo [mika] Add to utilities
95 template<typename T>
96 T divRoundUp (const T& a, const T& b)
97 {
98 	return (a / b) + (a % b == 0 ? 0 : 1);
99 }
100 
101 enum Usage
102 {
103 	// Mapped host read and write
104 	USAGE_HOST_READ = (0x1u<<0),
105 	USAGE_HOST_WRITE = (0x1u<<1),
106 
107 	// Copy and other transfer operations
108 	USAGE_TRANSFER_SRC = (0x1u<<2),
109 	USAGE_TRANSFER_DST = (0x1u<<3),
110 
111 	// Buffer usage flags
112 	USAGE_INDEX_BUFFER = (0x1u<<4),
113 	USAGE_VERTEX_BUFFER = (0x1u<<5),
114 
115 	USAGE_UNIFORM_BUFFER = (0x1u<<6),
116 	USAGE_STORAGE_BUFFER = (0x1u<<7),
117 
118 	USAGE_UNIFORM_TEXEL_BUFFER = (0x1u<<8),
119 	USAGE_STORAGE_TEXEL_BUFFER = (0x1u<<9),
120 
121 	// \todo [2016-03-09 mika] This is probably almost impossible to do
122 	USAGE_INDIRECT_BUFFER = (0x1u<<10),
123 
124 	// Texture usage flags
125 	USAGE_SAMPLED_IMAGE = (0x1u<<11),
126 	USAGE_STORAGE_IMAGE = (0x1u<<12),
127 	USAGE_COLOR_ATTACHMENT = (0x1u<<13),
128 	USAGE_INPUT_ATTACHMENT = (0x1u<<14),
129 	USAGE_DEPTH_STENCIL_ATTACHMENT = (0x1u<<15),
130 };
131 
132 bool supportsDeviceBufferWrites (Usage usage)
133 {
134 	if (usage & USAGE_TRANSFER_DST)
135 		return true;
136 
137 	if (usage & USAGE_STORAGE_BUFFER)
138 		return true;
139 
140 	if (usage & USAGE_STORAGE_TEXEL_BUFFER)
141 		return true;
142 
143 	return false;
144 }
145 
146 bool supportsDeviceImageWrites (Usage usage)
147 {
148 	if (usage & USAGE_TRANSFER_DST)
149 		return true;
150 
151 	if (usage & USAGE_STORAGE_IMAGE)
152 		return true;
153 
154 	if (usage & USAGE_COLOR_ATTACHMENT)
155 		return true;
156 
157 	return false;
158 }
159 
160 // Sequential access enums
161 enum Access
162 {
163 	ACCESS_INDIRECT_COMMAND_READ_BIT = 0,
164 	ACCESS_INDEX_READ_BIT,
165 	ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
166 	ACCESS_UNIFORM_READ_BIT,
167 	ACCESS_INPUT_ATTACHMENT_READ_BIT,
168 	ACCESS_SHADER_READ_BIT,
169 	ACCESS_SHADER_WRITE_BIT,
170 	ACCESS_COLOR_ATTACHMENT_READ_BIT,
171 	ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
172 	ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
173 	ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
174 	ACCESS_TRANSFER_READ_BIT,
175 	ACCESS_TRANSFER_WRITE_BIT,
176 	ACCESS_HOST_READ_BIT,
177 	ACCESS_HOST_WRITE_BIT,
178 	ACCESS_MEMORY_READ_BIT,
179 	ACCESS_MEMORY_WRITE_BIT,
180 
181 	ACCESS_LAST
182 };
183 
184 Access accessFlagToAccess (vk::VkAccessFlagBits flag)
185 {
186 	switch (flag)
187 	{
188 	case vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT:			return ACCESS_INDIRECT_COMMAND_READ_BIT;
189 	case vk::VK_ACCESS_INDEX_READ_BIT:						return ACCESS_INDEX_READ_BIT;
190 	case vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT:			return ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
191 	case vk::VK_ACCESS_UNIFORM_READ_BIT:					return ACCESS_UNIFORM_READ_BIT;
192 	case vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT:			return ACCESS_INPUT_ATTACHMENT_READ_BIT;
193 	case vk::VK_ACCESS_SHADER_READ_BIT:						return ACCESS_SHADER_READ_BIT;
194 	case vk::VK_ACCESS_SHADER_WRITE_BIT:					return ACCESS_SHADER_WRITE_BIT;
195 	case vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT:			return ACCESS_COLOR_ATTACHMENT_READ_BIT;
196 	case vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT:			return ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
197 	case vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT:	return ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
198 	case vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT:	return ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
199 	case vk::VK_ACCESS_TRANSFER_READ_BIT:					return ACCESS_TRANSFER_READ_BIT;
200 	case vk::VK_ACCESS_TRANSFER_WRITE_BIT:					return ACCESS_TRANSFER_WRITE_BIT;
201 	case vk::VK_ACCESS_HOST_READ_BIT:						return ACCESS_HOST_READ_BIT;
202 	case vk::VK_ACCESS_HOST_WRITE_BIT:						return ACCESS_HOST_WRITE_BIT;
203 	case vk::VK_ACCESS_MEMORY_READ_BIT:						return ACCESS_MEMORY_READ_BIT;
204 	case vk::VK_ACCESS_MEMORY_WRITE_BIT:					return ACCESS_MEMORY_WRITE_BIT;
205 
206 	default:
207 		DE_FATAL("Unknown access flags");
208 		return ACCESS_LAST;
209 	}
210 }
211 
212 // Sequential stage enums
213 enum PipelineStage
214 {
215 	PIPELINESTAGE_TOP_OF_PIPE_BIT = 0,
216 	PIPELINESTAGE_BOTTOM_OF_PIPE_BIT,
217 	PIPELINESTAGE_DRAW_INDIRECT_BIT,
218 	PIPELINESTAGE_VERTEX_INPUT_BIT,
219 	PIPELINESTAGE_VERTEX_SHADER_BIT,
220 	PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT,
221 	PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT,
222 	PIPELINESTAGE_GEOMETRY_SHADER_BIT,
223 	PIPELINESTAGE_FRAGMENT_SHADER_BIT,
224 	PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT,
225 	PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT,
226 	PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
227 	PIPELINESTAGE_COMPUTE_SHADER_BIT,
228 	PIPELINESTAGE_TRANSFER_BIT,
229 	PIPELINESTAGE_HOST_BIT,
230 
231 	PIPELINESTAGE_LAST
232 };
233 
234 PipelineStage pipelineStageFlagToPipelineStage (vk::VkPipelineStageFlagBits flag)
235 {
236 	switch (flag)
237 	{
238 		case vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT:						return PIPELINESTAGE_TOP_OF_PIPE_BIT;
239 		case vk::VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT:					return PIPELINESTAGE_BOTTOM_OF_PIPE_BIT;
240 		case vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT:					return PIPELINESTAGE_DRAW_INDIRECT_BIT;
241 		case vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT:					return PIPELINESTAGE_VERTEX_INPUT_BIT;
242 		case vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT:					return PIPELINESTAGE_VERTEX_SHADER_BIT;
243 		case vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT:		return PIPELINESTAGE_TESSELLATION_CONTROL_SHADER_BIT;
244 		case vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT:	return PIPELINESTAGE_TESSELLATION_EVALUATION_SHADER_BIT;
245 		case vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT:					return PIPELINESTAGE_GEOMETRY_SHADER_BIT;
246 		case vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT:					return PIPELINESTAGE_FRAGMENT_SHADER_BIT;
247 		case vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT:			return PIPELINESTAGE_EARLY_FRAGMENT_TESTS_BIT;
248 		case vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT:				return PIPELINESTAGE_LATE_FRAGMENT_TESTS_BIT;
249 		case vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT:			return PIPELINESTAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
250 		case vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT:					return PIPELINESTAGE_COMPUTE_SHADER_BIT;
251 		case vk::VK_PIPELINE_STAGE_TRANSFER_BIT:						return PIPELINESTAGE_TRANSFER_BIT;
252 		case vk::VK_PIPELINE_STAGE_HOST_BIT:							return PIPELINESTAGE_HOST_BIT;
253 
254 		default:
255 			DE_FATAL("Unknown pipeline stage flags");
256 			return PIPELINESTAGE_LAST;
257 	}
258 }
259 
260 Usage operator| (Usage a, Usage b)
261 {
262 	return (Usage)((deUint32)a | (deUint32)b);
263 }
264 
265 Usage operator& (Usage a, Usage b)
266 {
267 	return (Usage)((deUint32)a & (deUint32)b);
268 }
269 
270 string usageToName (Usage usage)
271 {
272 	const struct
273 	{
274 		Usage				usage;
275 		const char* const	name;
276 	} usageNames[] =
277 	{
278 		{ USAGE_HOST_READ,					"host_read" },
279 		{ USAGE_HOST_WRITE,					"host_write" },
280 
281 		{ USAGE_TRANSFER_SRC,				"transfer_src" },
282 		{ USAGE_TRANSFER_DST,				"transfer_dst" },
283 
284 		{ USAGE_INDEX_BUFFER,				"index_buffer" },
285 		{ USAGE_VERTEX_BUFFER,				"vertex_buffer" },
286 		{ USAGE_UNIFORM_BUFFER,				"uniform_buffer" },
287 		{ USAGE_STORAGE_BUFFER,				"storage_buffer" },
288 		{ USAGE_UNIFORM_TEXEL_BUFFER,		"uniform_texel_buffer" },
289 		{ USAGE_STORAGE_TEXEL_BUFFER,		"storage_texel_buffer" },
290 		{ USAGE_INDIRECT_BUFFER,			"indirect_buffer" },
291 		{ USAGE_SAMPLED_IMAGE,				"image_sampled" },
292 		{ USAGE_STORAGE_IMAGE,				"storage_image" },
293 		{ USAGE_COLOR_ATTACHMENT,			"color_attachment" },
294 		{ USAGE_INPUT_ATTACHMENT,			"input_attachment" },
295 		{ USAGE_DEPTH_STENCIL_ATTACHMENT,	"depth_stencil_attachment" },
296 	};
297 
298 	std::ostringstream	stream;
299 	bool				first = true;
300 
301 	for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usageNames); usageNdx++)
302 	{
303 		if (usage & usageNames[usageNdx].usage)
304 		{
305 			if (!first)
306 				stream << "_";
307 			else
308 				first = false;
309 
310 			stream << usageNames[usageNdx].name;
311 		}
312 	}
313 
314 	return stream.str();
315 }
316 
317 vk::VkBufferUsageFlags usageToBufferUsageFlags (Usage usage)
318 {
319 	vk::VkBufferUsageFlags flags = 0;
320 
321 	if (usage & USAGE_TRANSFER_SRC)
322 		flags |= vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
323 
324 	if (usage & USAGE_TRANSFER_DST)
325 		flags |= vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT;
326 
327 	if (usage & USAGE_INDEX_BUFFER)
328 		flags |= vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
329 
330 	if (usage & USAGE_VERTEX_BUFFER)
331 		flags |= vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
332 
333 	if (usage & USAGE_INDIRECT_BUFFER)
334 		flags |= vk::VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
335 
336 	if (usage & USAGE_UNIFORM_BUFFER)
337 		flags |= vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
338 
339 	if (usage & USAGE_STORAGE_BUFFER)
340 		flags |= vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
341 
342 	if (usage & USAGE_UNIFORM_TEXEL_BUFFER)
343 		flags |= vk::VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT;
344 
345 	if (usage & USAGE_STORAGE_TEXEL_BUFFER)
346 		flags |= vk::VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT;
347 
348 	return flags;
349 }
350 
351 vk::VkImageUsageFlags usageToImageUsageFlags (Usage usage)
352 {
353 	vk::VkImageUsageFlags flags = 0;
354 
355 	if (usage & USAGE_TRANSFER_SRC)
356 		flags |= vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
357 
358 	if (usage & USAGE_TRANSFER_DST)
359 		flags |= vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT;
360 
361 	if (usage & USAGE_SAMPLED_IMAGE)
362 		flags |= vk::VK_IMAGE_USAGE_SAMPLED_BIT;
363 
364 	if (usage & USAGE_STORAGE_IMAGE)
365 		flags |= vk::VK_IMAGE_USAGE_STORAGE_BIT;
366 
367 	if (usage & USAGE_COLOR_ATTACHMENT)
368 		flags |= vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
369 
370 	if (usage & USAGE_INPUT_ATTACHMENT)
371 		flags |= vk::VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT;
372 
373 	if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
374 		flags |= vk::VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
375 
376 	return flags;
377 }
378 
379 vk::VkPipelineStageFlags usageToStageFlags (Usage usage)
380 {
381 	vk::VkPipelineStageFlags flags = 0;
382 
383 	if (usage & (USAGE_HOST_READ|USAGE_HOST_WRITE))
384 		flags |= vk::VK_PIPELINE_STAGE_HOST_BIT;
385 
386 	if (usage & (USAGE_TRANSFER_SRC|USAGE_TRANSFER_DST))
387 		flags |= vk::VK_PIPELINE_STAGE_TRANSFER_BIT;
388 
389 	if (usage & (USAGE_VERTEX_BUFFER|USAGE_INDEX_BUFFER))
390 		flags |= vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
391 
392 	if (usage & USAGE_INDIRECT_BUFFER)
393 		flags |= vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
394 
395 	if (usage &
396 			(USAGE_UNIFORM_BUFFER
397 			| USAGE_STORAGE_BUFFER
398 			| USAGE_UNIFORM_TEXEL_BUFFER
399 			| USAGE_STORAGE_TEXEL_BUFFER
400 			| USAGE_SAMPLED_IMAGE
401 			| USAGE_STORAGE_IMAGE))
402 	{
403 		flags |= (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
404 				| vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
405 				| vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT);
406 	}
407 
408 	if (usage & USAGE_INPUT_ATTACHMENT)
409 		flags |= vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
410 
411 	if (usage & USAGE_COLOR_ATTACHMENT)
412 		flags |= vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
413 
414 	if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
415 	{
416 		flags |= vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT
417 				| vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
418 	}
419 
420 	return flags;
421 }
422 
423 vk::VkAccessFlags usageToAccessFlags (Usage usage)
424 {
425 	vk::VkAccessFlags flags = 0;
426 
427 	if (usage & USAGE_HOST_READ)
428 		flags |= vk::VK_ACCESS_HOST_READ_BIT;
429 
430 	if (usage & USAGE_HOST_WRITE)
431 		flags |= vk::VK_ACCESS_HOST_WRITE_BIT;
432 
433 	if (usage & USAGE_TRANSFER_SRC)
434 		flags |= vk::VK_ACCESS_TRANSFER_READ_BIT;
435 
436 	if (usage & USAGE_TRANSFER_DST)
437 		flags |= vk::VK_ACCESS_TRANSFER_WRITE_BIT;
438 
439 	if (usage & USAGE_INDEX_BUFFER)
440 		flags |= vk::VK_ACCESS_INDEX_READ_BIT;
441 
442 	if (usage & USAGE_VERTEX_BUFFER)
443 		flags |= vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
444 
445 	if (usage & (USAGE_UNIFORM_BUFFER | USAGE_UNIFORM_TEXEL_BUFFER))
446 		flags |= vk::VK_ACCESS_UNIFORM_READ_BIT;
447 
448 	if (usage & USAGE_SAMPLED_IMAGE)
449 		flags |= vk::VK_ACCESS_SHADER_READ_BIT;
450 
451 	if (usage & (USAGE_STORAGE_BUFFER
452 				| USAGE_STORAGE_TEXEL_BUFFER
453 				| USAGE_STORAGE_IMAGE))
454 		flags |= vk::VK_ACCESS_SHADER_READ_BIT | vk::VK_ACCESS_SHADER_WRITE_BIT;
455 
456 	if (usage & USAGE_INDIRECT_BUFFER)
457 		flags |= vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
458 
459 	if (usage & USAGE_COLOR_ATTACHMENT)
460 		flags |= vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
461 
462 	if (usage & USAGE_INPUT_ATTACHMENT)
463 		flags |= vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
464 
465 	if (usage & USAGE_DEPTH_STENCIL_ATTACHMENT)
466 		flags |= vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT
467 			| vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
468 
469 	return flags;
470 }
471 
472 struct TestConfig
473 {
474 	Usage				usage;
475 	vk::VkDeviceSize	size;
476 	vk::VkSharingMode	sharing;
477 };
478 
479 vk::Move<vk::VkCommandBuffer> createBeginCommandBuffer (const vk::DeviceInterface&	vkd,
480 														vk::VkDevice				device,
481 														vk::VkCommandPool			pool,
482 														vk::VkCommandBufferLevel	level)
483 {
484 	const vk::VkCommandBufferInheritanceInfo	inheritInfo	=
485 	{
486 		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
487 		DE_NULL,
488 		0,
489 		0,
490 		0,
491 		VK_FALSE,
492 		0u,
493 		0u
494 	};
495 	const vk::VkCommandBufferBeginInfo			beginInfo =
496 	{
497 		vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
498 		DE_NULL,
499 		0u,
500 		(level == vk::VK_COMMAND_BUFFER_LEVEL_SECONDARY ? &inheritInfo : (const vk::VkCommandBufferInheritanceInfo*)DE_NULL),
501 	};
502 
503 	vk::Move<vk::VkCommandBuffer> commandBuffer (allocateCommandBuffer(vkd, device, pool, level));
504 
505 	vkd.beginCommandBuffer(*commandBuffer, &beginInfo);
506 
507 	return commandBuffer;
508 }
509 
510 vk::Move<vk::VkBuffer> createBuffer (const vk::DeviceInterface&	vkd,
511 									 vk::VkDevice				device,
512 									 vk::VkDeviceSize			size,
513 									 vk::VkBufferUsageFlags		usage,
514 									 vk::VkSharingMode			sharingMode,
515 									 const vector<deUint32>&	queueFamilies)
516 {
517 	const vk::VkBufferCreateInfo	createInfo =
518 	{
519 		vk::VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
520 		DE_NULL,
521 
522 		0,	// flags
523 		size,
524 		usage,
525 		sharingMode,
526 		(deUint32)queueFamilies.size(),
527 		&queueFamilies[0]
528 	};
529 
530 	return vk::createBuffer(vkd, device, &createInfo);
531 }
532 
533 vk::Move<vk::VkDeviceMemory> allocMemory (const vk::DeviceInterface&	vkd,
534 										  vk::VkDevice					device,
535 										  vk::VkDeviceSize				size,
536 										  deUint32						memoryTypeIndex)
537 {
538 	const vk::VkMemoryAllocateInfo alloc =
539 	{
540 		vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,	// sType
541 		DE_NULL,									// pNext
542 
543 		size,
544 		memoryTypeIndex
545 	};
546 
547 	return vk::allocateMemory(vkd, device, &alloc);
548 }
549 
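// Allocate memory with the requested property flags for the buffer and bind it,
// trying the next compatible memory type if an allocation runs out of memory.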
550 vk::Move<vk::VkDeviceMemory> bindBufferMemory (const vk::InstanceInterface&	vki,
551 											   const vk::DeviceInterface&	vkd,
552 											   vk::VkPhysicalDevice			physicalDevice,
553 											   vk::VkDevice					device,
554 											   vk::VkBuffer					buffer,
555 											   vk::VkMemoryPropertyFlags	properties)
556 {
557 	const vk::VkMemoryRequirements				memoryRequirements	= vk::getBufferMemoryRequirements(vkd, device, buffer);
558 	const vk::VkPhysicalDeviceMemoryProperties	memoryProperties	= vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
559 	deUint32									memoryTypeIndex;
560 
561 	for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
562 	{
563 		if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
564 			&& (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
565 		{
566 			try
567 			{
568 				const vk::VkMemoryAllocateInfo	allocationInfo	=
569 				{
570 					vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
571 					DE_NULL,
572 					memoryRequirements.size,
573 					memoryTypeIndex
574 				};
575 				vk::Move<vk::VkDeviceMemory>	memory			(vk::allocateMemory(vkd, device, &allocationInfo));
576 
577 				VK_CHECK(vkd.bindBufferMemory(device, buffer, *memory, 0));
578 
579 				return memory;
580 			}
581 			catch (const vk::Error& error)
582 			{
583 				if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
584 					|| error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
585 				{
586 					// Try next memory type/heap if out of memory
587 				}
588 				else
589 				{
590 					// Throw all other errors forward
591 					throw;
592 				}
593 			}
594 		}
595 	}
596 
597 	TCU_FAIL("Failed to allocate memory for buffer");
598 }
599 
600 vk::Move<vk::VkDeviceMemory> bindImageMemory (const vk::InstanceInterface&	vki,
601 											   const vk::DeviceInterface&	vkd,
602 											   vk::VkPhysicalDevice			physicalDevice,
603 											   vk::VkDevice					device,
604 											   vk::VkImage					image,
605 											   vk::VkMemoryPropertyFlags	properties)
606 {
607 	const vk::VkMemoryRequirements				memoryRequirements	= vk::getImageMemoryRequirements(vkd, device, image);
608 	const vk::VkPhysicalDeviceMemoryProperties	memoryProperties	= vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
609 	deUint32									memoryTypeIndex;
610 
611 	for (memoryTypeIndex = 0; memoryTypeIndex < memoryProperties.memoryTypeCount; memoryTypeIndex++)
612 	{
613 		if ((memoryRequirements.memoryTypeBits & (0x1u << memoryTypeIndex))
614 			&& (memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags & properties) == properties)
615 		{
616 			try
617 			{
618 				const vk::VkMemoryAllocateInfo	allocationInfo	=
619 				{
620 					vk::VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
621 					DE_NULL,
622 					memoryRequirements.size,
623 					memoryTypeIndex
624 				};
625 				vk::Move<vk::VkDeviceMemory>	memory			(vk::allocateMemory(vkd, device, &allocationInfo));
626 
627 				VK_CHECK(vkd.bindImageMemory(device, image, *memory, 0));
628 
629 				return memory;
630 			}
631 			catch (const vk::Error& error)
632 			{
633 				if (error.getError() == vk::VK_ERROR_OUT_OF_DEVICE_MEMORY
634 					|| error.getError() == vk::VK_ERROR_OUT_OF_HOST_MEMORY)
635 				{
636 					// Try next memory type/heap if out of memory
637 				}
638 				else
639 				{
640 					// Throw all other errors forward
641 					throw;
642 				}
643 			}
644 		}
645 	}
646 
647 	TCU_FAIL("Failed to allocate memory for image");
648 }
649 
650 void* mapMemory (const vk::DeviceInterface&	vkd,
651 				 vk::VkDevice				device,
652 				 vk::VkDeviceMemory			memory,
653 				 vk::VkDeviceSize			size)
654 {
655 	void* ptr;
656 
657 	VK_CHECK(vkd.mapMemory(device, memory, 0, size, 0, &ptr));
658 
659 	return ptr;
660 }
661 
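// Reference copy of the tested memory contents. Tracks the expected value of each
// byte together with a per-byte "defined" bit packed into 64-bit words.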
662 class ReferenceMemory
663 {
664 public:
665 			ReferenceMemory	(size_t size);
666 
667 	void	set				(size_t pos, deUint8 val);
668 	deUint8	get				(size_t pos) const;
669 	bool	isDefined		(size_t pos) const;
670 
671 	void	setDefined		(size_t offset, size_t size, const void* data);
672 	void	setUndefined	(size_t offset, size_t size);
673 	void	setData			(size_t offset, size_t size, const void* data);
674 
675 	size_t	getSize			(void) const { return m_data.size(); }
676 
677 private:
678 	vector<deUint8>		m_data;
679 	vector<deUint64>	m_defined;
680 };
681 
682 ReferenceMemory::ReferenceMemory (size_t size)
683 	: m_data	(size, 0)
684 	, m_defined	(size / 64 + (size % 64 == 0 ? 0 : 1), 0ull)
685 {
686 }
687 
688 void ReferenceMemory::set (size_t pos, deUint8 val)
689 {
690 	DE_ASSERT(pos < m_data.size());
691 
692 	m_data[pos] = val;
693 	m_defined[pos / 64] |= 0x1ull << (pos % 64);
694 }
695 
696 void ReferenceMemory::setData (size_t offset, size_t size, const void* data_)
697 {
698 	const deUint8* data = (const deUint8*)data_;
699 
700 	DE_ASSERT(offset < m_data.size());
701 	DE_ASSERT(offset + size <= m_data.size());
702 
703 	// \todo [2016-03-09 mika] Optimize
704 	for (size_t pos = 0; pos < size; pos++)
705 	{
706 		m_data[offset + pos] = data[pos];
707 		m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
708 	}
709 }
710 
711 void ReferenceMemory::setUndefined	(size_t offset, size_t size)
712 {
713 	// \todo [2016-03-09 mika] Optimize
714 	for (size_t pos = 0; pos < size; pos++)
715 		m_defined[(offset + pos) / 64] |= 0x1ull << ((offset + pos) % 64);
716 }
717 
718 deUint8 ReferenceMemory::get (size_t pos) const
719 {
720 	DE_ASSERT(pos < m_data.size());
721 	DE_ASSERT(isDefined(pos));
722 	return m_data[pos];
723 }
724 
725 bool ReferenceMemory::isDefined (size_t pos) const
726 {
727 	DE_ASSERT(pos < m_data.size());
728 
729 	return (m_defined[pos / 64] & (0x1ull << (pos % 64))) != 0;
730 }
731 
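// Device memory allocation under test along with precomputed limits for the largest
// buffer and the largest RGBA8 image that can be placed in it.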
732 class Memory
733 {
734 public:
735 							Memory				(const vk::InstanceInterface&	vki,
736 												 const vk::DeviceInterface&		vkd,
737 												 vk::VkPhysicalDevice			physicalDevice,
738 												 vk::VkDevice					device,
739 												 vk::VkDeviceSize				size,
740 												 deUint32						memoryTypeIndex,
741 												 vk::VkDeviceSize				maxBufferSize,
742 												 deInt32						maxImageWidth,
743 												 deInt32						maxImageHeight);
744 
745 	vk::VkDeviceSize		getSize				(void) const { return m_size; }
746 	vk::VkDeviceSize		getMaxBufferSize	(void) const { return m_maxBufferSize; }
747 	bool					getSupportBuffers	(void) const { return m_maxBufferSize > 0; }
748 
749 	deInt32					getMaxImageWidth	(void) const { return m_maxImageWidth; }
750 	deInt32					getMaxImageHeight	(void) const { return m_maxImageHeight; }
751 	bool					getSupportImages	(void) const { return m_maxImageWidth > 0; }
752 
753 	const vk::VkMemoryType&	getMemoryType		(void) const { return m_memoryType; }
754 	deUint32				getMemoryTypeIndex	(void) const { return m_memoryTypeIndex; }
755 	vk::VkDeviceMemory		getMemory			(void) const { return *m_memory; }
756 
757 private:
758 	const vk::VkDeviceSize					m_size;
759 	const deUint32							m_memoryTypeIndex;
760 	const vk::VkMemoryType					m_memoryType;
761 	const vk::Unique<vk::VkDeviceMemory>	m_memory;
762 	const vk::VkDeviceSize					m_maxBufferSize;
763 	const deInt32							m_maxImageWidth;
764 	const deInt32							m_maxImageHeight;
765 };
766 
767 vk::VkMemoryType getMemoryTypeInfo (const vk::InstanceInterface&	vki,
768 									vk::VkPhysicalDevice			device,
769 									deUint32						memoryTypeIndex)
770 {
771 	const vk::VkPhysicalDeviceMemoryProperties memoryProperties = vk::getPhysicalDeviceMemoryProperties(vki, device);
772 
773 	DE_ASSERT(memoryTypeIndex < memoryProperties.memoryTypeCount);
774 
775 	return memoryProperties.memoryTypes[memoryTypeIndex];
776 }
777 
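// Binary-search for the largest buffer size whose memory requirements still fit the
// given allocation size and are compatible with the given memory type.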
778 vk::VkDeviceSize findMaxBufferSize (const vk::DeviceInterface&		vkd,
779 									vk::VkDevice					device,
780 
781 									vk::VkBufferUsageFlags			usage,
782 									vk::VkSharingMode				sharingMode,
783 									const vector<deUint32>&			queueFamilies,
784 
785 									vk::VkDeviceSize				memorySize,
786 									deUint32						memoryTypeIndex)
787 {
788 	vk::VkDeviceSize	lastSuccess	= 0;
789 	vk::VkDeviceSize	currentSize	= memorySize / 2;
790 
791 	{
792 		const vk::Unique<vk::VkBuffer>  buffer			(createBuffer(vkd, device, memorySize, usage, sharingMode, queueFamilies));
793 		const vk::VkMemoryRequirements  requirements	(vk::getBufferMemoryRequirements(vkd, device, *buffer));
794 
795 		if (requirements.size == memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
796 			return memorySize;
797 	}
798 
799 	for (vk::VkDeviceSize stepSize = memorySize / 4; currentSize > 0; stepSize /= 2)
800 	{
801 		const vk::Unique<vk::VkBuffer>	buffer			(createBuffer(vkd, device, currentSize, usage, sharingMode, queueFamilies));
802 		const vk::VkMemoryRequirements	requirements	(vk::getBufferMemoryRequirements(vkd, device, *buffer));
803 
804 		if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
805 		{
806 			lastSuccess = currentSize;
807 			currentSize += stepSize;
808 		}
809 		else
810 			currentSize -= stepSize;
811 
812 		if (stepSize == 0)
813 			break;
814 	}
815 
816 	return lastSuccess;
817 }
818 
819 // Round size down to the largest value of the form W * H * 4, where W and H < 4096
820 vk::VkDeviceSize roundBufferSizeToWxHx4 (vk::VkDeviceSize size)
821 {
822 	const vk::VkDeviceSize	maxTextureSize	= 4096;
823 	vk::VkDeviceSize		maxTexelCount	= size / 4;
824 	vk::VkDeviceSize		bestW			= de::max(maxTexelCount, maxTextureSize);
825 	vk::VkDeviceSize		bestH			= maxTexelCount / bestW;
826 
827 	// \todo [2016-03-09 mika] Could probably be faster?
828 	for (vk::VkDeviceSize w = 1; w * w < maxTexelCount && w < maxTextureSize && bestW * bestH * 4 < size; w++)
829 	{
830 		const vk::VkDeviceSize h = maxTexelCount / w;
831 
832 		if (bestW * bestH < w * h)
833 		{
834 			bestW = w;
835 			bestH = h;
836 		}
837 	}
838 
839 	return bestW * bestH * 4;
840 }
841 
842 // Find an RGBA8 image size that uses exactly "size" bytes.
843 // "size" must be W * H * 4, where W and H < 4096
844 IVec2 findImageSizeWxHx4 (vk::VkDeviceSize size)
845 {
846 	const vk::VkDeviceSize	maxTextureSize	= 4096;
847 	vk::VkDeviceSize		texelCount		= size / 4;
848 
849 	DE_ASSERT((size % 4) == 0);
850 
851 	// \todo [2016-03-09 mika] Could probably be faster?
852 	for (vk::VkDeviceSize w = 1; w < maxTextureSize && w < texelCount; w++)
853 	{
854 		const vk::VkDeviceSize	h	= texelCount / w;
855 
856 		if ((texelCount  % w) == 0 && h < maxTextureSize)
857 			return IVec2((int)w, (int)h);
858 	}
859 
860 	DE_FATAL("Invalid size");
861 	return IVec2(-1, -1);
862 }
863 
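// Binary-search for the largest 2D RGBA8 image whose memory requirements still fit
// the given allocation size and are compatible with the given memory type.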
864 IVec2 findMaxRGBA8ImageSize (const vk::DeviceInterface&	vkd,
865 							 vk::VkDevice				device,
866 
867 							 vk::VkImageUsageFlags		usage,
868 							 vk::VkSharingMode			sharingMode,
869 							 const vector<deUint32>&	queueFamilies,
870 
871 							 vk::VkDeviceSize			memorySize,
872 							 deUint32					memoryTypeIndex)
873 {
874 	IVec2		lastSuccess		(0);
875 	IVec2		currentSize;
876 
877 	{
878 		const deUint32	texelCount	= (deUint32)(memorySize / 4);
879 		const deUint32	width		= (deUint32)deFloatSqrt((float)texelCount);
880 		const deUint32	height		= texelCount / width;
881 
882 		currentSize[0] = deMaxu32(width, height);
883 		currentSize[1] = deMinu32(width, height);
884 	}
885 
886 	for (deInt32 stepSize = currentSize[0] / 2; currentSize[0] > 0; stepSize /= 2)
887 	{
888 		const vk::VkImageCreateInfo	createInfo		=
889 		{
890 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
891 			DE_NULL,
892 
893 			0u,
894 			vk::VK_IMAGE_TYPE_2D,
895 			vk::VK_FORMAT_R8G8B8A8_UNORM,
896 			{
897 				(deUint32)currentSize[0],
898 				(deUint32)currentSize[1],
899 				1u,
900 			},
901 			1u, 1u,
902 			vk::VK_SAMPLE_COUNT_1_BIT,
903 			vk::VK_IMAGE_TILING_OPTIMAL,
904 			usage,
905 			sharingMode,
906 			(deUint32)queueFamilies.size(),
907 			&queueFamilies[0],
908 			vk::VK_IMAGE_LAYOUT_UNDEFINED
909 		};
910 		const vk::Unique<vk::VkImage>	image			(vk::createImage(vkd, device, &createInfo));
911 		const vk::VkMemoryRequirements	requirements	(vk::getImageMemoryRequirements(vkd, device, *image));
912 
913 		if (requirements.size <= memorySize && requirements.memoryTypeBits & (0x1u << memoryTypeIndex))
914 		{
915 			lastSuccess = currentSize;
916 			currentSize[0] += stepSize;
917 			currentSize[1] += stepSize;
918 		}
919 		else
920 		{
921 			currentSize[0] -= stepSize;
922 			currentSize[1] -= stepSize;
923 		}
924 
925 		if (stepSize == 0)
926 			break;
927 	}
928 
929 	return lastSuccess;
930 }
931 
932 Memory::Memory (const vk::InstanceInterface&	vki,
933 				const vk::DeviceInterface&		vkd,
934 				vk::VkPhysicalDevice			physicalDevice,
935 				vk::VkDevice					device,
936 				vk::VkDeviceSize				size,
937 				deUint32						memoryTypeIndex,
938 				vk::VkDeviceSize				maxBufferSize,
939 				deInt32							maxImageWidth,
940 				deInt32							maxImageHeight)
941 	: m_size			(size)
942 	, m_memoryTypeIndex	(memoryTypeIndex)
943 	, m_memoryType		(getMemoryTypeInfo(vki, physicalDevice, memoryTypeIndex))
944 	, m_memory			(allocMemory(vkd, device, size, memoryTypeIndex))
945 	, m_maxBufferSize	(maxBufferSize)
946 	, m_maxImageWidth	(maxImageWidth)
947 	, m_maxImageHeight	(maxImageHeight)
948 {
949 }
950 
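// Shared Vulkan objects used by all commands: device, queues, command pool and
// compiled shader binaries.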
951 class Context
952 {
953 public:
954 													Context					(const vk::InstanceInterface&					vki,
955 																			 const vk::DeviceInterface&						vkd,
956 																			 vk::VkPhysicalDevice							physicalDevice,
957 																			 vk::VkDevice									device,
958 																			 vk::VkQueue									queue,
959 																			 deUint32										queueFamilyIndex,
960 																			 const vector<pair<deUint32, vk::VkQueue> >&	queues,
961 																			 const vk::BinaryCollection&					binaryCollection)
962 		: m_vki					(vki)
963 		, m_vkd					(vkd)
964 		, m_physicalDevice		(physicalDevice)
965 		, m_device				(device)
966 		, m_queue				(queue)
967 		, m_queueFamilyIndex	(queueFamilyIndex)
968 		, m_queues				(queues)
969 		, m_commandPool			(createCommandPool(vkd, device, vk::VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex))
970 		, m_binaryCollection	(binaryCollection)
971 	{
972 		for (size_t queueNdx = 0; queueNdx < m_queues.size(); queueNdx++)
973 			m_queueFamilies.push_back(m_queues[queueNdx].first);
974 	}
975 
976 	const vk::InstanceInterface&					getInstanceInterface	(void) const { return m_vki; }
977 	vk::VkPhysicalDevice							getPhysicalDevice		(void) const { return m_physicalDevice; }
978 	vk::VkDevice									getDevice				(void) const { return m_device; }
979 	const vk::DeviceInterface&						getDeviceInterface		(void) const { return m_vkd; }
980 	vk::VkQueue										getQueue				(void) const { return m_queue; }
981 	deUint32										getQueueFamily			(void) const { return m_queueFamilyIndex; }
982 	const vector<pair<deUint32, vk::VkQueue> >&		getQueues				(void) const { return m_queues; }
983 	const vector<deUint32>							getQueueFamilies		(void) const { return m_queueFamilies; }
984 	vk::VkCommandPool								getCommandPool			(void) const { return *m_commandPool; }
985 	const vk::BinaryCollection&						getBinaryCollection		(void) const { return m_binaryCollection; }
986 
987 private:
988 	const vk::InstanceInterface&					m_vki;
989 	const vk::DeviceInterface&						m_vkd;
990 	const vk::VkPhysicalDevice						m_physicalDevice;
991 	const vk::VkDevice								m_device;
992 	const vk::VkQueue								m_queue;
993 	const deUint32									m_queueFamilyIndex;
994 	const vector<pair<deUint32, vk::VkQueue> >		m_queues;
995 	const vk::Unique<vk::VkCommandPool>				m_commandPool;
996 	const vk::BinaryCollection&						m_binaryCollection;
997 	vector<deUint32>								m_queueFamilies;
998 };
999 
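// State carried through the prepare phase: the memory under test and the buffer or
// image currently bound to it.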
1000 class PrepareContext
1001 {
1002 public:
1003 													PrepareContext			(const Context&	context,
1004 																			 const Memory&	memory)
1005 		: m_context	(context)
1006 		, m_memory	(memory)
1007 	{
1008 	}
1009 
1010 	const Memory&									getMemory				(void) const { return m_memory; }
1011 	const Context&									getContext				(void) const { return m_context; }
1012 	const vk::BinaryCollection&						getBinaryCollection		(void) const { return m_context.getBinaryCollection(); }
1013 
1014 	void				setBuffer		(vk::Move<vk::VkBuffer>	buffer,
1015 										 vk::VkDeviceSize		size)
1016 	{
1017 		DE_ASSERT(!m_currentImage);
1018 		DE_ASSERT(!m_currentBuffer);
1019 
1020 		m_currentBuffer		= buffer;
1021 		m_currentBufferSize	= size;
1022 	}
1023 
1024 	vk::VkBuffer		getBuffer		(void) const { return *m_currentBuffer; }
1025 	vk::VkDeviceSize	getBufferSize	(void) const
1026 	{
1027 		DE_ASSERT(m_currentBuffer);
1028 		return m_currentBufferSize;
1029 	}
1030 
1031 	void				releaseBuffer	(void) { m_currentBuffer.disown(); }
1032 
1033 	void				setImage		(vk::Move<vk::VkImage>	image,
1034 										 vk::VkImageLayout		layout,
1035 										 vk::VkDeviceSize		memorySize,
1036 										 deInt32				width,
1037 										 deInt32				height)
1038 	{
1039 		DE_ASSERT(!m_currentImage);
1040 		DE_ASSERT(!m_currentBuffer);
1041 
1042 		m_currentImage				= image;
1043 		m_currentImageMemorySize	= memorySize;
1044 		m_currentImageLayout		= layout;
1045 		m_currentImageWidth			= width;
1046 		m_currentImageHeight		= height;
1047 	}
1048 
1049 	void				setImageLayout	(vk::VkImageLayout layout)
1050 	{
1051 		DE_ASSERT(m_currentImage);
1052 		m_currentImageLayout = layout;
1053 	}
1054 
1055 	vk::VkImage			getImage		(void) const { return *m_currentImage; }
1056 	deInt32				getImageWidth	(void) const
1057 	{
1058 		DE_ASSERT(m_currentImage);
1059 		return m_currentImageWidth;
1060 	}
1061 	deInt32				getImageHeight	(void) const
1062 	{
1063 		DE_ASSERT(m_currentImage);
1064 		return m_currentImageHeight;
1065 	}
1066 	vk::VkDeviceSize	getImageMemorySize	(void) const
1067 	{
1068 		DE_ASSERT(m_currentImage);
1069 		return m_currentImageMemorySize;
1070 	}
1071 
1072 	void				releaseImage	(void) { m_currentImage.disown(); }
1073 
1074 	vk::VkImageLayout	getImageLayout	(void) const
1075 	{
1076 		DE_ASSERT(m_currentImage);
1077 		return m_currentImageLayout;
1078 	}
1079 
1080 private:
1081 	const Context&			m_context;
1082 	const Memory&			m_memory;
1083 
1084 	vk::Move<vk::VkBuffer>	m_currentBuffer;
1085 	vk::VkDeviceSize		m_currentBufferSize;
1086 
1087 	vk::Move<vk::VkImage>	m_currentImage;
1088 	vk::VkDeviceSize		m_currentImageMemorySize;
1089 	vk::VkImageLayout		m_currentImageLayout;
1090 	deInt32					m_currentImageWidth;
1091 	deInt32					m_currentImageHeight;
1092 };
1093 
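// State carried through the execute phase, most importantly the current host mapping
// of the memory under test.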
1094 class ExecuteContext
1095 {
1096 public:
1097 					ExecuteContext	(const Context&	context)
1098 		: m_context	(context)
1099 	{
1100 	}
1101 
1102 	const Context&	getContext		(void) const { return m_context; }
1103 	void			setMapping		(void* ptr) { m_mapping = ptr; }
1104 	void*			getMapping		(void) const { return m_mapping; }
1105 
1106 private:
1107 	const Context&	m_context;
1108 	void*			m_mapping;
1109 };
1110 
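// State used during verification: reference memory contents, reference image and the
// result collector used to report failures.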
1111 class VerifyContext
1112 {
1113 public:
1114 							VerifyContext		(TestLog&				log,
1115 												 tcu::ResultCollector&	resultCollector,
1116 												 const Context&			context,
1117 												 vk::VkDeviceSize		size)
1118 		: m_log				(log)
1119 		, m_resultCollector	(resultCollector)
1120 		, m_context			(context)
1121 		, m_reference		((size_t)size)
1122 	{
1123 	}
1124 
1125 	const Context&			getContext			(void) const { return m_context; }
1126 	TestLog&				getLog				(void) const { return m_log; }
1127 	tcu::ResultCollector&	getResultCollector	(void) const { return m_resultCollector; }
1128 
1129 	ReferenceMemory&		getReference		(void) { return m_reference; }
1130 	TextureLevel&			getReferenceImage	(void) { return m_referenceImage;}
1131 
1132 private:
1133 	TestLog&				m_log;
1134 	tcu::ResultCollector&	m_resultCollector;
1135 	const Context&			m_context;
1136 	ReferenceMemory			m_reference;
1137 	TextureLevel			m_referenceImage;
1138 };
1139 
1140 class Command
1141 {
1142 public:
1143 	// Constructor should allocate all non-Vulkan resources.
1144 	virtual				~Command	(void) {}
1145 
1146 	// Get name of the command
1147 	virtual const char*	getName		(void) const = 0;
1148 
1149 	// Log prepare operations
1150 	virtual void		logPrepare	(TestLog&, size_t) const {}
1151 	// Log executed operations
1152 	virtual void		logExecute	(TestLog&, size_t) const {}
1153 
1154 	// Prepare should allocate all Vulkan resources and any resources that
1155 	// require the buffer or memory to have been allocated already. This should
1156 	// also build all command buffers etc.
1157 	virtual void		prepare		(PrepareContext&) {}
1158 
1159 	// Execute command. Write or read mapped memory, submit commands to queue
1160 	// etc.
1161 	virtual void		execute		(ExecuteContext&) {}
1162 
1163 	// Verify that results are correct.
1164 	virtual void		verify		(VerifyContext&, size_t) {}
1165 
1166 protected:
1167 	// Allow only inheritance
1168 						Command		(void) {}
1169 
1170 private:
1171 	// Disallow copying
1172 						Command		(const Command&);
1173 	Command&			operator=	(const Command&);
1174 };
1175 
1176 class Map : public Command
1177 {
1178 public:
1179 						Map			(void) {}
1180 						~Map		(void) {}
1181 	const char*			getName		(void) const { return "Map"; }
1182 
1183 
1184 	void				logExecute	(TestLog& log, size_t commandIndex) const
1185 	{
1186 		log << TestLog::Message << commandIndex << ":" << getName() << " Map memory" << TestLog::EndMessage;
1187 	}
1188 
1189 	void				prepare		(PrepareContext& context)
1190 	{
1191 		m_memory	= context.getMemory().getMemory();
1192 		m_size		= context.getMemory().getSize();
1193 	}
1194 
1195 	void				execute		(ExecuteContext& context)
1196 	{
1197 		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1198 		const vk::VkDevice			device	= context.getContext().getDevice();
1199 
1200 		context.setMapping(mapMemory(vkd, device, m_memory, m_size));
1201 	}
1202 
1203 private:
1204 	vk::VkDeviceMemory	m_memory;
1205 	vk::VkDeviceSize	m_size;
1206 };
1207 
1208 class UnMap : public Command
1209 {
1210 public:
1211 						UnMap		(void) {}
1212 						~UnMap		(void) {}
1213 	const char*			getName		(void) const { return "UnMap"; }
1214 
1215 	void				logExecute	(TestLog& log, size_t commandIndex) const
1216 	{
1217 		log << TestLog::Message << commandIndex << ": Unmap memory" << TestLog::EndMessage;
1218 	}
1219 
1220 	void				prepare		(PrepareContext& context)
1221 	{
1222 		m_memory	= context.getMemory().getMemory();
1223 	}
1224 
1225 	void				execute		(ExecuteContext& context)
1226 	{
1227 		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1228 		const vk::VkDevice			device	= context.getContext().getDevice();
1229 
1230 		vkd.unmapMemory(device, m_memory);
1231 		context.setMapping(DE_NULL);
1232 	}
1233 
1234 private:
1235 	vk::VkDeviceMemory	m_memory;
1236 };
1237 
1238 class Invalidate : public Command
1239 {
1240 public:
1241 						Invalidate	(void) {}
1242 						~Invalidate	(void) {}
1243 	const char*			getName		(void) const { return "Invalidate"; }
1244 
1245 	void				logExecute	(TestLog& log, size_t commandIndex) const
1246 	{
1247 		log << TestLog::Message << commandIndex << ": Invalidate mapped memory" << TestLog::EndMessage;
1248 	}
1249 
1250 	void				prepare		(PrepareContext& context)
1251 	{
1252 		m_memory	= context.getMemory().getMemory();
1253 		m_size		= context.getMemory().getSize();
1254 	}
1255 
1256 	void				execute		(ExecuteContext& context)
1257 	{
1258 		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1259 		const vk::VkDevice			device	= context.getContext().getDevice();
1260 
1261 		vk::invalidateMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1262 	}
1263 
1264 private:
1265 	vk::VkDeviceMemory	m_memory;
1266 	vk::VkDeviceSize	m_size;
1267 };
1268 
1269 class Flush : public Command
1270 {
1271 public:
1272 						Flush		(void) {}
1273 						~Flush		(void) {}
1274 	const char*			getName		(void) const { return "Flush"; }
1275 
1276 	void				logExecute	(TestLog& log, size_t commandIndex) const
1277 	{
1278 		log << TestLog::Message << commandIndex << ": Flush mapped memory" << TestLog::EndMessage;
1279 	}
1280 
1281 	void				prepare		(PrepareContext& context)
1282 	{
1283 		m_memory	= context.getMemory().getMemory();
1284 		m_size		= context.getMemory().getSize();
1285 	}
1286 
1287 	void				execute		(ExecuteContext& context)
1288 	{
1289 		const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1290 		const vk::VkDevice			device	= context.getContext().getDevice();
1291 
1292 		vk::flushMappedMemoryRange(vkd, device, m_memory, 0, m_size);
1293 	}
1294 
1295 private:
1296 	vk::VkDeviceMemory	m_memory;
1297 	vk::VkDeviceSize	m_size;
1298 };
1299 
1300 // Host memory reads and writes
1301 class HostMemoryAccess : public Command
1302 {
1303 public:
1304 					HostMemoryAccess	(bool read, bool write, deUint32 seed);
1305 					~HostMemoryAccess	(void) {}
1306 	const char*		getName				(void) const { return "HostMemoryAccess"; }
1307 
1308 	void			logExecute			(TestLog& log, size_t commandIndex) const;
1309 	void			prepare				(PrepareContext& context);
1310 	void			execute				(ExecuteContext& context);
1311 	void			verify				(VerifyContext& context, size_t commandIndex);
1312 
1313 private:
1314 	const bool		m_read;
1315 	const bool		m_write;
1316 	const deUint32	m_seed;
1317 
1318 	size_t			m_size;
1319 	vector<deUint8>	m_readData;
1320 };
1321 
1322 HostMemoryAccess::HostMemoryAccess (bool read, bool write, deUint32 seed)
1323 	: m_read	(read)
1324 	, m_write	(write)
1325 	, m_seed	(seed)
1326 {
1327 }
1328 
1329 void HostMemoryAccess::logExecute (TestLog& log, size_t commandIndex) const
1330 {
1331 	log << TestLog::Message << commandIndex << ": Host memory access:" << (m_read ? " read" : "") << (m_write ? " write" : "")  << ", seed: " << m_seed << TestLog::EndMessage;
1332 }
1333 
1334 void HostMemoryAccess::prepare (PrepareContext& context)
1335 {
1336 	m_size = (size_t)context.getMemory().getSize();
1337 
1338 	if (m_read)
1339 		m_readData.resize(m_size, 0);
1340 }
1341 
1342 void HostMemoryAccess::execute (ExecuteContext& context)
1343 {
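	// For read+write access the old contents are saved and each byte is replaced with
	// its old value XORed with a seeded pseudo-random mask, so verify() can reproduce
	// both the read results and the new expected contents.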
1344 	if (m_read && m_write)
1345 	{
1346 		de::Random		rng	(m_seed);
1347 		deUint8* const	ptr	= (deUint8*)context.getMapping();
1348 		if (m_size >= ONE_MEGABYTE)
1349 		{
1350 			deMemcpy(&m_readData[0], ptr, m_size);
1351 			for (size_t pos = 0; pos < m_size; ++pos)
1352 			{
1353 				ptr[pos] = m_readData[pos] ^ rng.getUint8();
1354 			}
1355 		}
1356 		else
1357 		{
1358 			for (size_t pos = 0; pos < m_size; ++pos)
1359 			{
1360 				const deUint8	mask	= rng.getUint8();
1361 				const deUint8	value	= ptr[pos];
1362 
1363 				m_readData[pos] = value;
1364 				ptr[pos] = value ^ mask;
1365 			}
1366 		}
1367 	}
1368 	else if (m_read)
1369 	{
1370 		const deUint8* const	ptr = (deUint8*)context.getMapping();
1371 		if (m_size >= ONE_MEGABYTE)
1372 		{
1373 			deMemcpy(&m_readData[0], ptr, m_size);
1374 		}
1375 		else
1376 		{
1377 			for (size_t pos = 0; pos < m_size; ++pos)
1378 			{
1379 				m_readData[pos] = ptr[pos];
1380 			}
1381 		}
1382 	}
1383 	else if (m_write)
1384 	{
1385 		de::Random		rng	(m_seed);
1386 		deUint8* const	ptr	= (deUint8*)context.getMapping();
1387 		for (size_t pos = 0; pos < m_size; ++pos)
1388 		{
1389 			ptr[pos] = rng.getUint8();
1390 		}
1391 	}
1392 	else
1393 		DE_FATAL("Host memory access without read or write.");
1394 }
1395 
1396 void HostMemoryAccess::verify (VerifyContext& context, size_t commandIndex)
1397 {
1398 	tcu::ResultCollector&	resultCollector	= context.getResultCollector();
1399 	ReferenceMemory&		reference		= context.getReference();
1400 	de::Random				rng				(m_seed);
1401 
1402 	if (m_read && m_write)
1403 	{
1404 		for (size_t pos = 0; pos < m_size; pos++)
1405 		{
1406 			const deUint8	mask	= rng.getUint8();
1407 			const deUint8	value	= m_readData[pos];
1408 
1409 			if (reference.isDefined(pos))
1410 			{
1411 				if (value != reference.get(pos))
1412 				{
1413 					resultCollector.fail(
1414 							de::toString(commandIndex) + ":" + getName()
1415 							+ " Result differs from reference, Expected: "
1416 							+ de::toString(tcu::toHex<8>(reference.get(pos)))
1417 							+ ", Got: "
1418 							+ de::toString(tcu::toHex<8>(value))
1419 							+ ", At offset: "
1420 							+ de::toString(pos));
1421 					break;
1422 				}
1423 
1424 				reference.set(pos, reference.get(pos) ^ mask);
1425 			}
1426 		}
1427 	}
1428 	else if (m_read)
1429 	{
1430 		for (size_t pos = 0; pos < m_size; pos++)
1431 		{
1432 			const deUint8	value	= m_readData[pos];
1433 
1434 			if (reference.isDefined(pos))
1435 			{
1436 				if (value != reference.get(pos))
1437 				{
1438 					resultCollector.fail(
1439 							de::toString(commandIndex) + ":" + getName()
1440 							+ " Result differs from reference, Expected: "
1441 							+ de::toString(tcu::toHex<8>(reference.get(pos)))
1442 							+ ", Got: "
1443 							+ de::toString(tcu::toHex<8>(value))
1444 							+ ", At offset: "
1445 							+ de::toString(pos));
1446 					break;
1447 				}
1448 			}
1449 		}
1450 	}
1451 	else if (m_write)
1452 	{
1453 		for (size_t pos = 0; pos < m_size; pos++)
1454 		{
1455 			const deUint8	value	= rng.getUint8();
1456 
1457 			reference.set(pos, value);
1458 		}
1459 	}
1460 	else
1461 		DE_FATAL("Host memory access without read or write.");
1462 }
1463 
1464 class CreateBuffer : public Command
1465 {
1466 public:
1467 									CreateBuffer	(vk::VkBufferUsageFlags	usage,
1468 													 vk::VkSharingMode		sharing);
1469 									~CreateBuffer	(void) {}
1470 	const char*						getName			(void) const { return "CreateBuffer"; }
1471 
1472 	void							logPrepare		(TestLog& log, size_t commandIndex) const;
1473 	void							prepare			(PrepareContext& context);
1474 
1475 private:
1476 	const vk::VkBufferUsageFlags	m_usage;
1477 	const vk::VkSharingMode			m_sharing;
1478 };
1479 
1480 CreateBuffer::CreateBuffer (vk::VkBufferUsageFlags	usage,
1481 							vk::VkSharingMode		sharing)
1482 	: m_usage	(usage)
1483 	, m_sharing	(sharing)
1484 {
1485 }
1486 
1487 void CreateBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1488 {
1489 	log << TestLog::Message << commandIndex << ":" << getName() << " Create buffer, Sharing mode: " << m_sharing << ", Usage: " << vk::getBufferUsageFlagsStr(m_usage) << TestLog::EndMessage;
1490 }
1491 
1492 void CreateBuffer::prepare (PrepareContext& context)
1493 {
1494 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
1495 	const vk::VkDevice			device			= context.getContext().getDevice();
1496 	const vk::VkDeviceSize		bufferSize		= context.getMemory().getMaxBufferSize();
1497 	const vector<deUint32>&		queueFamilies	= context.getContext().getQueueFamilies();
1498 
1499 	context.setBuffer(createBuffer(vkd, device, bufferSize, m_usage, m_sharing, queueFamilies), bufferSize);
1500 }
1501 
1502 class DestroyBuffer : public Command
1503 {
1504 public:
1505 							DestroyBuffer	(void);
1506 							~DestroyBuffer	(void) {}
1507 	const char*				getName			(void) const { return "DestroyBuffer"; }
1508 
1509 	void					logExecute		(TestLog& log, size_t commandIndex) const;
1510 	void					prepare			(PrepareContext& context);
1511 	void					execute			(ExecuteContext& context);
1512 
1513 private:
1514 	vk::Move<vk::VkBuffer>	m_buffer;
1515 };
1516 
1517 DestroyBuffer::DestroyBuffer (void)
1518 {
1519 }
1520 
1521 void DestroyBuffer::prepare (PrepareContext& context)
1522 {
1523 	m_buffer = vk::Move<vk::VkBuffer>(vk::check(context.getBuffer()), vk::Deleter<vk::VkBuffer>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1524 	context.releaseBuffer();
1525 }
1526 
1527 void DestroyBuffer::logExecute (TestLog& log, size_t commandIndex) const
1528 {
1529 	log << TestLog::Message << commandIndex << ":" << getName() << " Destroy buffer" << TestLog::EndMessage;
1530 }
1531 
1532 void DestroyBuffer::execute (ExecuteContext& context)
1533 {
1534 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
1535 	const vk::VkDevice			device			= context.getContext().getDevice();
1536 
1537 	vkd.destroyBuffer(device, m_buffer.disown(), DE_NULL);
1538 }
1539 
1540 class BindBufferMemory : public Command
1541 {
1542 public:
1543 				BindBufferMemory	(void) {}
1544 				~BindBufferMemory	(void) {}
1545 	const char*	getName				(void) const { return "BindBufferMemory"; }
1546 
1547 	void		logPrepare			(TestLog& log, size_t commandIndex) const;
1548 	void		prepare				(PrepareContext& context);
1549 };
1550 
1551 void BindBufferMemory::logPrepare (TestLog& log, size_t commandIndex) const
1552 {
1553 	log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to buffer" << TestLog::EndMessage;
1554 }
1555 
1556 void BindBufferMemory::prepare (PrepareContext& context)
1557 {
1558 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
1559 	const vk::VkDevice			device			= context.getContext().getDevice();
1560 
1561 	VK_CHECK(vkd.bindBufferMemory(device, context.getBuffer(), context.getMemory().getMemory(), 0));
1562 }
1563 
1564 class CreateImage : public Command
1565 {
1566 public:
1567 									CreateImage		(vk::VkImageUsageFlags	usage,
1568 													 vk::VkSharingMode		sharing);
1569 									~CreateImage	(void) {}
1570 	const char*						getName			(void) const { return "CreateImage"; }
1571 
1572 	void							logPrepare		(TestLog& log, size_t commandIndex) const;
1573 	void							prepare			(PrepareContext& context);
1574 	void							verify			(VerifyContext& context, size_t commandIndex);
1575 
1576 private:
1577 	const vk::VkImageUsageFlags	m_usage;
1578 	const vk::VkSharingMode		m_sharing;
1579 	deInt32						m_imageWidth;
1580 	deInt32						m_imageHeight;
1581 };
1582 
1583 CreateImage::CreateImage (vk::VkImageUsageFlags	usage,
1584 						  vk::VkSharingMode		sharing)
1585 	: m_usage	(usage)
1586 	, m_sharing	(sharing)
1587 {
1588 }
1589 
1590 void CreateImage::logPrepare (TestLog& log, size_t commandIndex) const
1591 {
1592 	log << TestLog::Message << commandIndex << ":" << getName() << " Create image, sharing: " << m_sharing << ", usage: " << vk::getImageUsageFlagsStr(m_usage)  << TestLog::EndMessage;
1593 }
1594 
1595 void CreateImage::prepare (PrepareContext& context)
1596 {
1597 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
1598 	const vk::VkDevice			device			= context.getContext().getDevice();
1599 	const vector<deUint32>&		queueFamilies	= context.getContext().getQueueFamilies();
1600 
1601 	m_imageWidth	= context.getMemory().getMaxImageWidth();
1602 	m_imageHeight	= context.getMemory().getMaxImageHeight();
1603 
1604 	{
1605 		const vk::VkImageCreateInfo	createInfo		=
1606 		{
1607 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
1608 			DE_NULL,
1609 
1610 			0u,
1611 			vk::VK_IMAGE_TYPE_2D,
1612 			vk::VK_FORMAT_R8G8B8A8_UNORM,
1613 			{
1614 				(deUint32)m_imageWidth,
1615 				(deUint32)m_imageHeight,
1616 				1u,
1617 			},
1618 			1u, 1u,
1619 			vk::VK_SAMPLE_COUNT_1_BIT,
1620 			vk::VK_IMAGE_TILING_OPTIMAL,
1621 			m_usage,
1622 			m_sharing,
1623 			(deUint32)queueFamilies.size(),
1624 			&queueFamilies[0],
1625 			vk::VK_IMAGE_LAYOUT_UNDEFINED
1626 		};
1627 		vk::Move<vk::VkImage>			image			(createImage(vkd, device, &createInfo));
1628 		const vk::VkMemoryRequirements	requirements	= vk::getImageMemoryRequirements(vkd, device, *image);
1629 
1630 		context.setImage(image, vk::VK_IMAGE_LAYOUT_UNDEFINED, requirements.size, m_imageWidth, m_imageHeight);
1631 	}
1632 }
1633 
1634 void CreateImage::verify (VerifyContext& context, size_t)
1635 {
1636 	context.getReferenceImage() = TextureLevel(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight);
1637 }
1638 
1639 class DestroyImage : public Command
1640 {
1641 public:
1642 							DestroyImage	(void);
1643 							~DestroyImage	(void) {}
1644 	const char*				getName			(void) const { return "DestroyImage"; }
1645 
1646 	void					logExecute		(TestLog& log, size_t commandIndex) const;
1647 	void					prepare			(PrepareContext& context);
1648 	void					execute			(ExecuteContext& context);
1649 
1650 private:
1651 	vk::Move<vk::VkImage>	m_image;
1652 };
1653 
1654 DestroyImage::DestroyImage (void)
1655 {
1656 }
1657 
1658 void DestroyImage::prepare (PrepareContext& context)
1659 {
1660 	m_image = vk::Move<vk::VkImage>(vk::check(context.getImage()), vk::Deleter<vk::VkImage>(context.getContext().getDeviceInterface(), context.getContext().getDevice(), DE_NULL));
1661 	context.releaseImage();
1662 }
1663 
1664 
1665 void DestroyImage::logExecute (TestLog& log, size_t commandIndex) const
1666 {
1667 	log << TestLog::Message << commandIndex << ":" << getName() << " Destroy image" << TestLog::EndMessage;
1668 }
1669 
1670 void DestroyImage::execute (ExecuteContext& context)
1671 {
1672 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
1673 	const vk::VkDevice			device			= context.getContext().getDevice();
1674 
1675 	vkd.destroyImage(device, m_image.disown(), DE_NULL);
1676 }
1677 
1678 class BindImageMemory : public Command
1679 {
1680 public:
1681 				BindImageMemory		(void) {}
1682 				~BindImageMemory	(void) {}
1683 	const char*	getName				(void) const { return "BindImageMemory"; }
1684 
1685 	void		logPrepare			(TestLog& log, size_t commandIndex) const;
1686 	void		prepare				(PrepareContext& context);
1687 };
1688 
1689 void BindImageMemory::logPrepare (TestLog& log, size_t commandIndex) const
1690 {
1691 	log << TestLog::Message << commandIndex << ":" << getName() << " Bind memory to image" << TestLog::EndMessage;
1692 }
1693 
1694 void BindImageMemory::prepare (PrepareContext& context)
1695 {
1696 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
1697 	const vk::VkDevice				device			= context.getContext().getDevice();
1698 
1699 	VK_CHECK(vkd.bindImageMemory(device, context.getImage(), context.getMemory().getMemory(), 0));
1700 }
1701 
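// Coarse-grained synchronization point: waits until all work previously submitted
// to the queue has completed.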
1702 class QueueWaitIdle : public Command
1703 {
1704 public:
1705 				QueueWaitIdle	(void) {}
1706 				~QueueWaitIdle	(void) {}
1707 	const char*	getName			(void) const { return "QueueWaitIdle"; }
1708 
1709 	void		logExecute		(TestLog& log, size_t commandIndex) const;
1710 	void		execute			(ExecuteContext& context);
1711 };
1712 
1713 void QueueWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1714 {
1715 	log << TestLog::Message << commandIndex << ":" << getName() << " Queue wait idle" << TestLog::EndMessage;
1716 }
1717 
1718 void QueueWaitIdle::execute (ExecuteContext& context)
1719 {
1720 	const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1721 	const vk::VkQueue			queue	= context.getContext().getQueue();
1722 
1723 	VK_CHECK(vkd.queueWaitIdle(queue));
1724 }
1725 
1726 class DeviceWaitIdle : public Command
1727 {
1728 public:
1729 				DeviceWaitIdle	(void) {}
1730 				~DeviceWaitIdle	(void) {}
1731 	const char*	getName			(void) const { return "DeviceWaitIdle"; }
1732 
1733 	void		logExecute		(TestLog& log, size_t commandIndex) const;
1734 	void		execute			(ExecuteContext& context);
1735 };
1736 
1737 void DeviceWaitIdle::logExecute (TestLog& log, size_t commandIndex) const
1738 {
1739 	log << TestLog::Message << commandIndex << ":" << getName() << " Device wait idle" << TestLog::EndMessage;
1740 }
1741 
1742 void DeviceWaitIdle::execute (ExecuteContext& context)
1743 {
1744 	const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1745 	const vk::VkDevice			device	= context.getContext().getDevice();
1746 
1747 	VK_CHECK(vkd.deviceWaitIdle(device));
1748 }
1749 
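// Read-only view of the prepare-time state (memory, buffer, image) plus the command
// buffer currently being recorded; passed to CmdCommand::submit().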
1750 class SubmitContext
1751 {
1752 public:
1753 								SubmitContext		(const PrepareContext&		context,
1754 													 const vk::VkCommandBuffer	commandBuffer)
1755 		: m_context			(context)
1756 		, m_commandBuffer	(commandBuffer)
1757 	{
1758 	}
1759 
1760 	const Memory&				getMemory			(void) const { return m_context.getMemory(); }
1761 	const Context&				getContext			(void) const { return m_context.getContext(); }
1762 	vk::VkCommandBuffer			getCommandBuffer	(void) const { return m_commandBuffer; }
1763 
1764 	vk::VkBuffer				getBuffer			(void) const { return m_context.getBuffer(); }
1765 	vk::VkDeviceSize			getBufferSize		(void) const { return m_context.getBufferSize(); }
1766 
1767 	vk::VkImage					getImage			(void) const { return m_context.getImage(); }
1768 	deInt32						getImageWidth		(void) const { return m_context.getImageWidth(); }
1769 	deInt32						getImageHeight		(void) const { return m_context.getImageHeight(); }
1770 
1771 private:
1772 	const PrepareContext&		m_context;
1773 	const vk::VkCommandBuffer	m_commandBuffer;
1774 };
1775 
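// A command that is recorded into a command buffer. prepare() still runs on the
// host, submit() records into the command buffer provided by the SubmitContext and
// verify() checks or updates the reference state.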
1776 class CmdCommand
1777 {
1778 public:
1779 	virtual				~CmdCommand	(void) {}
1780 	virtual const char*	getName		(void) const = 0;
1781 
1782 	// Log things that are done during prepare
1783 	virtual void		logPrepare	(TestLog&, size_t) const {}
1784 	// Log submitted calls etc.
1785 	virtual void		logSubmit	(TestLog&, size_t) const {}
1786 
1787 	// Allocate vulkan resources and prepare for submit.
1788 	virtual void		prepare		(PrepareContext&) {}
1789 
1790 	// Submit commands to command buffer.
1791 	virtual void		submit		(SubmitContext&) {}
1792 
1793 	// Verify results
1794 	virtual void		verify		(VerifyContext&, size_t) {}
1795 };
1796 
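// Illustrative sketch only (a hypothetical composition, not one of the generated
// sequences): CmdCommands are driven through a SubmitCommandBuffer, which takes
// ownership of them:
//
//	vector<CmdCommand*> cmds;
//	cmds.push_back(new FillBuffer(0xdeadbeefu));
//	cmds.push_back(new PipelineBarrier(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT,
//									   vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT,
//									   PipelineBarrier::TYPE_BUFFER, tcu::nothing<vk::VkImageLayout>()));
//	Command* const submitCmd = new SubmitCommandBuffer(cmds);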
1797 class SubmitCommandBuffer : public Command
1798 {
1799 public:
1800 					SubmitCommandBuffer		(const vector<CmdCommand*>& commands);
1801 					~SubmitCommandBuffer	(void);
1802 
1803 	const char*		getName					(void) const { return "SubmitCommandBuffer"; }
1804 	void			logExecute				(TestLog& log, size_t commandIndex) const;
1805 	void			logPrepare				(TestLog& log, size_t commandIndex) const;
1806 
1807 	// Allocate command buffer and submit commands to command buffer
1808 	void			prepare					(PrepareContext& context);
1809 	void			execute					(ExecuteContext& context);
1810 
1811 	// Verify that results are correct.
1812 	void			verify					(VerifyContext& context, size_t commandIndex);
1813 
1814 private:
1815 	vector<CmdCommand*>				m_commands;
1816 	vk::Move<vk::VkCommandBuffer>	m_commandBuffer;
1817 };
1818 
1819 SubmitCommandBuffer::SubmitCommandBuffer (const vector<CmdCommand*>& commands)
1820 	: m_commands	(commands)
1821 {
1822 }
1823 
1824 SubmitCommandBuffer::~SubmitCommandBuffer (void)
1825 {
1826 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1827 		delete m_commands[cmdNdx];
1828 }
1829 
1830 void SubmitCommandBuffer::prepare (PrepareContext& context)
1831 {
1832 	const vk::DeviceInterface&	vkd			= context.getContext().getDeviceInterface();
1833 	const vk::VkDevice			device		= context.getContext().getDevice();
1834 	const vk::VkCommandPool		commandPool	= context.getContext().getCommandPool();
1835 
1836 	m_commandBuffer = createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY);
1837 
1838 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1839 	{
1840 		CmdCommand& command = *m_commands[cmdNdx];
1841 
1842 		command.prepare(context);
1843 	}
1844 
1845 	{
1846 		SubmitContext submitContext (context, *m_commandBuffer);
1847 
1848 		for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1849 		{
1850 			CmdCommand& command = *m_commands[cmdNdx];
1851 
1852 			command.submit(submitContext);
1853 		}
1854 
1855 		endCommandBuffer(vkd, *m_commandBuffer);
1856 	}
1857 }
1858 
1859 void SubmitCommandBuffer::execute (ExecuteContext& context)
1860 {
1861 	const vk::DeviceInterface&	vkd		= context.getContext().getDeviceInterface();
1862 	const vk::VkCommandBuffer	cmd		= *m_commandBuffer;
1863 	const vk::VkQueue			queue	= context.getContext().getQueue();
1864 	const vk::VkSubmitInfo		submit	=
1865 	{
1866 		vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
1867 		DE_NULL,
1868 
1869 		0,
1870 		DE_NULL,
1871 		(const vk::VkPipelineStageFlags*)DE_NULL,
1872 
1873 		1,
1874 		&cmd,
1875 
1876 		0,
1877 		DE_NULL
1878 	};
1879 
1880 	VK_CHECK(vkd.queueSubmit(queue, 1, &submit, 0));
1881 }
1882 
1883 void SubmitCommandBuffer::verify (VerifyContext& context, size_t commandIndex)
1884 {
1885 	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
1886 	const tcu::ScopedLogSection	section		(context.getLog(), sectionName, sectionName);
1887 
1888 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1889 		m_commands[cmdNdx]->verify(context, cmdNdx);
1890 }
1891 
1892 void SubmitCommandBuffer::logPrepare (TestLog& log, size_t commandIndex) const
1893 {
1894 	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
1895 	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
1896 
1897 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1898 		m_commands[cmdNdx]->logPrepare(log, cmdNdx);
1899 }
1900 
1901 void SubmitCommandBuffer::logExecute (TestLog& log, size_t commandIndex) const
1902 {
1903 	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
1904 	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
1905 
1906 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
1907 		m_commands[cmdNdx]->logSubmit(log, cmdNdx);
1908 }
1909 
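// Records a single vkCmdPipelineBarrier using one barrier of the selected
// granularity: a global VkMemoryBarrier, a VkBufferMemoryBarrier covering the whole
// test buffer, or a VkImageMemoryBarrier that keeps the image in the layout given
// at construction.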
1910 class PipelineBarrier : public CmdCommand
1911 {
1912 public:
1913 	enum Type
1914 	{
1915 		TYPE_GLOBAL = 0,
1916 		TYPE_BUFFER,
1917 		TYPE_IMAGE,
1918 		TYPE_LAST
1919 	};
1920 									PipelineBarrier		(const vk::VkPipelineStageFlags			srcStages,
1921 														 const vk::VkAccessFlags				srcAccesses,
1922 														 const vk::VkPipelineStageFlags			dstStages,
1923 														 const vk::VkAccessFlags				dstAccesses,
1924 														 Type									type,
1925 														 const tcu::Maybe<vk::VkImageLayout>	imageLayout);
1926 									~PipelineBarrier	(void) {}
1927 	const char*						getName				(void) const { return "PipelineBarrier"; }
1928 
1929 	void							logSubmit			(TestLog& log, size_t commandIndex) const;
1930 	void							submit				(SubmitContext& context);
1931 
1932 private:
1933 	const vk::VkPipelineStageFlags		m_srcStages;
1934 	const vk::VkAccessFlags				m_srcAccesses;
1935 	const vk::VkPipelineStageFlags		m_dstStages;
1936 	const vk::VkAccessFlags				m_dstAccesses;
1937 	const Type							m_type;
1938 	const tcu::Maybe<vk::VkImageLayout>	m_imageLayout;
1939 };
1940 
1941 PipelineBarrier::PipelineBarrier (const vk::VkPipelineStageFlags		srcStages,
1942 								  const vk::VkAccessFlags				srcAccesses,
1943 								  const vk::VkPipelineStageFlags		dstStages,
1944 								  const vk::VkAccessFlags				dstAccesses,
1945 								  Type									type,
1946 								  const tcu::Maybe<vk::VkImageLayout>	imageLayout)
1947 	: m_srcStages	(srcStages)
1948 	, m_srcAccesses	(srcAccesses)
1949 	, m_dstStages	(dstStages)
1950 	, m_dstAccesses	(dstAccesses)
1951 	, m_type		(type)
1952 	, m_imageLayout	(imageLayout)
1953 {
1954 }
1955 
1956 void PipelineBarrier::logSubmit (TestLog& log, size_t commandIndex) const
1957 {
1958 	log << TestLog::Message << commandIndex << ":" << getName()
1959 		<< " " << (m_type == TYPE_GLOBAL ? "Global pipeline barrier"
1960 					: m_type == TYPE_BUFFER ? "Buffer pipeline barrier"
1961 					: "Image pipeline barrier")
1962 		<< ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
1963 		<< ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses) << TestLog::EndMessage;
1964 }
1965 
1966 void PipelineBarrier::submit (SubmitContext& context)
1967 {
1968 	const vk::DeviceInterface&	vkd	= context.getContext().getDeviceInterface();
1969 	const vk::VkCommandBuffer	cmd	= context.getCommandBuffer();
1970 
1971 	switch (m_type)
1972 	{
1973 		case TYPE_GLOBAL:
1974 		{
1975 			const vk::VkMemoryBarrier	barrier		=
1976 			{
1977 				vk::VK_STRUCTURE_TYPE_MEMORY_BARRIER,
1978 				DE_NULL,
1979 
1980 				m_srcAccesses,
1981 				m_dstAccesses
1982 			};
1983 
1984 			vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 1, &barrier, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
1985 			break;
1986 		}
1987 
1988 		case TYPE_BUFFER:
1989 		{
1990 			const vk::VkBufferMemoryBarrier	barrier		=
1991 			{
1992 				vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
1993 				DE_NULL,
1994 
1995 				m_srcAccesses,
1996 				m_dstAccesses,
1997 
1998 				VK_QUEUE_FAMILY_IGNORED,
1999 				VK_QUEUE_FAMILY_IGNORED,
2000 
2001 				context.getBuffer(),
2002 				0,
2003 				VK_WHOLE_SIZE
2004 			};
2005 
2006 			vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2007 			break;
2008 		}
2009 
2010 		case TYPE_IMAGE:
2011 		{
2012 			const vk::VkImageMemoryBarrier	barrier		=
2013 			{
2014 				vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2015 				DE_NULL,
2016 
2017 				m_srcAccesses,
2018 				m_dstAccesses,
2019 
2020 				*m_imageLayout,
2021 				*m_imageLayout,
2022 
2023 				VK_QUEUE_FAMILY_IGNORED,
2024 				VK_QUEUE_FAMILY_IGNORED,
2025 
2026 				context.getImage(),
2027 				{
2028 					vk::VK_IMAGE_ASPECT_COLOR_BIT,
2029 					0, 1,
2030 					0, 1
2031 				}
2032 			};
2033 
2034 			vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2035 			break;
2036 		}
2037 
2038 		default:
2039 			DE_FATAL("Unknown pipeline barrier type");
2040 	}
2041 }
2042 
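// Records an image layout transition from m_srcLayout to m_dstLayout. verify()
// marks the image's backing memory as undefined in the reference, since the raw
// byte layout of an optimally tiled image is not tracked across the transition.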
2043 class ImageTransition : public CmdCommand
2044 {
2045 public:
2046 						ImageTransition		(vk::VkPipelineStageFlags	srcStages,
2047 											 vk::VkAccessFlags			srcAccesses,
2048 
2049 											 vk::VkPipelineStageFlags	dstStages,
2050 											 vk::VkAccessFlags			dstAccesses,
2051 
2052 											 vk::VkImageLayout			srcLayout,
2053 											 vk::VkImageLayout			dstLayout);
2054 
2055 						~ImageTransition	(void) {}
2056 	const char*			getName				(void) const { return "ImageTransition"; }
2057 
2058 	void				prepare				(PrepareContext& context);
2059 	void				logSubmit			(TestLog& log, size_t commandIndex) const;
2060 	void				submit				(SubmitContext& context);
2061 	void				verify				(VerifyContext& context, size_t);
2062 
2063 private:
2064 	const vk::VkPipelineStageFlags	m_srcStages;
2065 	const vk::VkAccessFlags			m_srcAccesses;
2066 	const vk::VkPipelineStageFlags	m_dstStages;
2067 	const vk::VkAccessFlags			m_dstAccesses;
2068 	const vk::VkImageLayout			m_srcLayout;
2069 	const vk::VkImageLayout			m_dstLayout;
2070 
2071 	vk::VkDeviceSize				m_imageMemorySize;
2072 };
2073 
2074 ImageTransition::ImageTransition (vk::VkPipelineStageFlags	srcStages,
2075 								  vk::VkAccessFlags			srcAccesses,
2076 
2077 								  vk::VkPipelineStageFlags	dstStages,
2078 								  vk::VkAccessFlags			dstAccesses,
2079 
2080 								  vk::VkImageLayout			srcLayout,
2081 								  vk::VkImageLayout			dstLayout)
2082 	: m_srcStages		(srcStages)
2083 	, m_srcAccesses		(srcAccesses)
2084 	, m_dstStages		(dstStages)
2085 	, m_dstAccesses		(dstAccesses)
2086 	, m_srcLayout		(srcLayout)
2087 	, m_dstLayout		(dstLayout)
2088 {
2089 }
2090 
2091 void ImageTransition::logSubmit (TestLog& log, size_t commandIndex) const
2092 {
2093 	log << TestLog::Message << commandIndex << ":" << getName()
2094 		<< " Image transition pipeline barrier"
2095 		<< ", srcStages: " << vk::getPipelineStageFlagsStr(m_srcStages) << ", srcAccesses: " << vk::getAccessFlagsStr(m_srcAccesses)
2096 		<< ", dstStages: " << vk::getPipelineStageFlagsStr(m_dstStages) << ", dstAccesses: " << vk::getAccessFlagsStr(m_dstAccesses)
2097 		<< ", srcLayout: " << m_srcLayout << ", dstLayout: " << m_dstLayout << TestLog::EndMessage;
2098 }
2099 
2100 void ImageTransition::prepare (PrepareContext& context)
2101 {
2102 	DE_ASSERT(context.getImageLayout() == vk::VK_IMAGE_LAYOUT_UNDEFINED || m_srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED || context.getImageLayout() == m_srcLayout);
2103 
2104 	context.setImageLayout(m_dstLayout);
2105 	m_imageMemorySize = context.getImageMemorySize();
2106 }
2107 
2108 void ImageTransition::submit (SubmitContext& context)
2109 {
2110 	const vk::DeviceInterface&		vkd			= context.getContext().getDeviceInterface();
2111 	const vk::VkCommandBuffer		cmd			= context.getCommandBuffer();
2112 	const vk::VkImageMemoryBarrier	barrier		=
2113 	{
2114 		vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2115 		DE_NULL,
2116 
2117 		m_srcAccesses,
2118 		m_dstAccesses,
2119 
2120 		m_srcLayout,
2121 		m_dstLayout,
2122 
2123 		VK_QUEUE_FAMILY_IGNORED,
2124 		VK_QUEUE_FAMILY_IGNORED,
2125 
2126 		context.getImage(),
2127 		{
2128 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
2129 			0u, 1u,
2130 			0u, 1u
2131 		}
2132 	};
2133 
2134 	vkd.cmdPipelineBarrier(cmd, m_srcStages, m_dstStages, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2135 }
2136 
2137 void ImageTransition::verify (VerifyContext& context, size_t)
2138 {
2139 	context.getReference().setUndefined(0, (size_t)m_imageMemorySize);
2140 }
2141 
2142 class FillBuffer : public CmdCommand
2143 {
2144 public:
2145 						FillBuffer	(deUint32 value) : m_value(value) {}
2146 						~FillBuffer	(void) {}
2147 	const char*			getName		(void) const { return "FillBuffer"; }
2148 
2149 	void				logSubmit	(TestLog& log, size_t commandIndex) const;
2150 	void				submit		(SubmitContext& context);
2151 	void				verify		(VerifyContext& context, size_t commandIndex);
2152 
2153 private:
2154 	const deUint32		m_value;
2155 	vk::VkDeviceSize	m_bufferSize;
2156 };
2157 
2158 void FillBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2159 {
2160 	log << TestLog::Message << commandIndex << ":" << getName() << " Fill value: " << m_value << TestLog::EndMessage;
2161 }
2162 
2163 void FillBuffer::submit (SubmitContext& context)
2164 {
2165 	const vk::DeviceInterface&	vkd			= context.getContext().getDeviceInterface();
2166 	const vk::VkCommandBuffer	cmd			= context.getCommandBuffer();
2167 	const vk::VkBuffer			buffer		= context.getBuffer();
2168 	const vk::VkDeviceSize		sizeMask	= ~(0x3ull); // \note Round down to multiple of 4
2169 
2170 	m_bufferSize = sizeMask & context.getBufferSize();
2171 	vkd.cmdFillBuffer(cmd, buffer, 0, m_bufferSize, m_value);
2172 }
2173 
2174 void FillBuffer::verify (VerifyContext& context, size_t)
2175 {
2176 	ReferenceMemory&	reference	= context.getReference();
2177 
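	// vkCmdFillBuffer repeats the 32-bit value across the buffer, so expand it to
	// per-byte reference values, taking host endianness into account.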
2178 	for (size_t ndx = 0; ndx < m_bufferSize; ndx++)
2179 	{
2180 #if (DE_ENDIANNESS == DE_LITTLE_ENDIAN)
2181 		reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(ndx % 4)))));
2182 #else
2183 		reference.set(ndx, (deUint8)(0xffu & (m_value >> (8*(3 - (ndx % 4))))));
2184 #endif
2185 	}
2186 }
2187 
2188 class UpdateBuffer : public CmdCommand
2189 {
2190 public:
2191 						UpdateBuffer	(deUint32 seed) : m_seed(seed) {}
2192 						~UpdateBuffer	(void) {}
2193 	const char*			getName			(void) const { return "UpdateBuffer"; }
2194 
2195 	void				logSubmit		(TestLog& log, size_t commandIndex) const;
2196 	void				submit			(SubmitContext& context);
2197 	void				verify			(VerifyContext& context, size_t commandIndex);
2198 
2199 private:
2200 	const deUint32		m_seed;
2201 	vk::VkDeviceSize	m_bufferSize;
2202 };
2203 
2204 void UpdateBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2205 {
2206 	log << TestLog::Message << commandIndex << ":" << getName() << " Update buffer, seed: " << m_seed << TestLog::EndMessage;
2207 }
2208 
2209 void UpdateBuffer::submit (SubmitContext& context)
2210 {
2211 	const vk::DeviceInterface&	vkd			= context.getContext().getDeviceInterface();
2212 	const vk::VkCommandBuffer	cmd			= context.getCommandBuffer();
2213 	const vk::VkBuffer			buffer		= context.getBuffer();
2214 	const size_t				blockSize	= 65536;
2215 	std::vector<deUint8>		data		(blockSize, 0);
2216 	de::Random					rng			(m_seed);
2217 
2218 	m_bufferSize = context.getBufferSize();
2219 
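	// vkCmdUpdateBuffer accepts at most 65536 bytes per call, so write the
	// pseudorandom contents in blockSize chunks.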
2220 	for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2221 	{
2222 		for (size_t ndx = 0; ndx < data.size(); ndx++)
2223 			data[ndx] = rng.getUint8();
2224 
2225 		if (m_bufferSize - updated > blockSize)
2226 			vkd.cmdUpdateBuffer(cmd, buffer, updated, blockSize, (const deUint32*)(&data[0]));
2227 		else
2228 			vkd.cmdUpdateBuffer(cmd, buffer, updated, m_bufferSize - updated, (const deUint32*)(&data[0]));
2229 	}
2230 }
2231 
2232 void UpdateBuffer::verify (VerifyContext& context, size_t)
2233 {
2234 	ReferenceMemory&	reference	= context.getReference();
2235 	const size_t		blockSize	= 65536;
2236 	vector<deUint8>		data		(blockSize, 0);
2237 	de::Random			rng			(m_seed);
2238 
2239 	for (size_t updated = 0; updated < m_bufferSize; updated += blockSize)
2240 	{
2241 		for (size_t ndx = 0; ndx < data.size(); ndx++)
2242 			data[ndx] = rng.getUint8();
2243 
2244 		if (m_bufferSize - updated > blockSize)
2245 			reference.setData(updated, blockSize, &data[0]);
2246 		else
2247 			reference.setData(updated, (size_t)(m_bufferSize - updated), &data[0]);
2248 	}
2249 }
2250 
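// Copies the whole test buffer into a freshly allocated host-visible buffer;
// verify() reads that buffer back on the host and compares every byte whose
// reference value is defined.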
2251 class BufferCopyToBuffer : public CmdCommand
2252 {
2253 public:
2254 									BufferCopyToBuffer	(void) {}
2255 									~BufferCopyToBuffer	(void) {}
2256 	const char*						getName				(void) const { return "BufferCopyToBuffer"; }
2257 
2258 	void							logPrepare			(TestLog& log, size_t commandIndex) const;
2259 	void							prepare				(PrepareContext& context);
2260 	void							logSubmit			(TestLog& log, size_t commandIndex) const;
2261 	void							submit				(SubmitContext& context);
2262 	void							verify				(VerifyContext& context, size_t commandIndex);
2263 
2264 private:
2265 	vk::VkDeviceSize				m_bufferSize;
2266 	vk::Move<vk::VkBuffer>			m_dstBuffer;
2267 	vk::Move<vk::VkDeviceMemory>	m_memory;
2268 };
2269 
2270 void BufferCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2271 {
2272 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for buffer to buffer copy." << TestLog::EndMessage;
2273 }
2274 
2275 void BufferCopyToBuffer::prepare (PrepareContext& context)
2276 {
2277 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
2278 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
2279 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
2280 	const vk::VkDevice				device			= context.getContext().getDevice();
2281 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
2282 
2283 	m_bufferSize = context.getBufferSize();
2284 
2285 	m_dstBuffer	= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2286 	m_memory	= bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2287 }
2288 
2289 void BufferCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2290 {
2291 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to another buffer" << TestLog::EndMessage;
2292 }
2293 
2294 void BufferCopyToBuffer::submit (SubmitContext& context)
2295 {
2296 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
2297 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
2298 	const vk::VkBufferCopy		range			=
2299 	{
2300 		0, 0, // Offsets
2301 		m_bufferSize
2302 	};
2303 
2304 	vkd.cmdCopyBuffer(commandBuffer, context.getBuffer(), *m_dstBuffer, 1, &range);
2305 }
2306 
2307 void BufferCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
2308 {
2309 	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
2310 	ReferenceMemory&						reference		(context.getReference());
2311 	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
2312 	const vk::VkDevice						device			= context.getContext().getDevice();
2313 	const vk::VkQueue						queue			= context.getContext().getQueue();
2314 	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
2315 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2316 	const vk::VkBufferMemoryBarrier			barrier			=
2317 	{
2318 		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2319 		DE_NULL,
2320 
2321 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2322 		vk::VK_ACCESS_HOST_READ_BIT,
2323 
2324 		VK_QUEUE_FAMILY_IGNORED,
2325 		VK_QUEUE_FAMILY_IGNORED,
2326 		*m_dstBuffer,
2327 		0,
2328 		VK_WHOLE_SIZE
2329 	};
2330 
2331 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2332 
2333 	endCommandBuffer(vkd, *commandBuffer);
2334 	submitCommandsAndWait(vkd, device, queue, *commandBuffer);
2335 
2336 	{
2337 		void* const	ptr		= mapMemory(vkd, device, *m_memory, m_bufferSize);
2338 		bool		isOk	= true;
2339 
2340 		vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2341 
2342 		{
2343 			const deUint8* const data = (const deUint8*)ptr;
2344 
2345 			for (size_t pos = 0; pos < (size_t)m_bufferSize; pos++)
2346 			{
2347 				if (reference.isDefined(pos))
2348 				{
2349 					if (data[pos] != reference.get(pos))
2350 					{
2351 						resultCollector.fail(
2352 								de::toString(commandIndex) + ":" + getName()
2353 								+ " Result differs from reference, Expected: "
2354 								+ de::toString(tcu::toHex<8>(reference.get(pos)))
2355 								+ ", Got: "
2356 								+ de::toString(tcu::toHex<8>(data[pos]))
2357 								+ ", At offset: "
2358 								+ de::toString(pos));
2359 						isOk = false;
						break;
2360 					}
2361 				}
2362 			}
2363 		}
2364 
2365 		vkd.unmapMemory(device, *m_memory);
2366 
2367 		if (!isOk)
2368 			context.getLog() << TestLog::Message << commandIndex << ": Buffer copy to buffer verification failed" << TestLog::EndMessage;
2369 	}
2370 }
2371 
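// Fills a host-visible staging buffer with seeded pseudorandom data in prepare()
// and copies it over the test buffer; verify() replays the same RNG sequence into
// the reference.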
2372 class BufferCopyFromBuffer : public CmdCommand
2373 {
2374 public:
2375 									BufferCopyFromBuffer	(deUint32 seed) : m_seed(seed) {}
2376 									~BufferCopyFromBuffer	(void) {}
2377 	const char*						getName					(void) const { return "BufferCopyFromBuffer"; }
2378 
2379 	void							logPrepare				(TestLog& log, size_t commandIndex) const;
2380 	void							prepare					(PrepareContext& context);
2381 	void							logSubmit				(TestLog& log, size_t commandIndex) const;
2382 	void							submit					(SubmitContext& context);
2383 	void							verify					(VerifyContext& context, size_t commandIndex);
2384 
2385 private:
2386 	const deUint32					m_seed;
2387 	vk::VkDeviceSize				m_bufferSize;
2388 	vk::Move<vk::VkBuffer>			m_srcBuffer;
2389 	vk::Move<vk::VkDeviceMemory>	m_memory;
2390 };
2391 
2392 void BufferCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2393 {
2394 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to buffer copy. Seed: " << m_seed << TestLog::EndMessage;
2395 }
2396 
2397 void BufferCopyFromBuffer::prepare (PrepareContext& context)
2398 {
2399 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
2400 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
2401 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
2402 	const vk::VkDevice				device			= context.getContext().getDevice();
2403 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
2404 
2405 	m_bufferSize	= context.getBufferSize();
2406 	m_srcBuffer		= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2407 	m_memory		= bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2408 
2409 	{
2410 		void* const	ptr	= mapMemory(vkd, device, *m_memory, m_bufferSize);
2411 		de::Random	rng	(m_seed);
2412 
2413 		{
2414 			deUint8* const	data = (deUint8*)ptr;
2415 
2416 			for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2417 				data[ndx] = rng.getUint8();
2418 		}
2419 
2420 		vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
2421 		vkd.unmapMemory(device, *m_memory);
2422 	}
2423 }
2424 
2425 void BufferCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2426 {
2427 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from another buffer" << TestLog::EndMessage;
2428 }
2429 
2430 void BufferCopyFromBuffer::submit (SubmitContext& context)
2431 {
2432 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
2433 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
2434 	const vk::VkBufferCopy		range			=
2435 	{
2436 		0, 0, // Offsets
2437 		m_bufferSize
2438 	};
2439 
2440 	vkd.cmdCopyBuffer(commandBuffer, *m_srcBuffer, context.getBuffer(), 1, &range);
2441 }
2442 
2443 void BufferCopyFromBuffer::verify (VerifyContext& context, size_t)
2444 {
2445 	ReferenceMemory&	reference	(context.getReference());
2446 	de::Random			rng			(m_seed);
2447 
2448 	for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
2449 		reference.set(ndx, rng.getUint8());
2450 }
2451 
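// Copies the test buffer into a separate RGBA8 image whose dimensions are chosen
// so that width * height * 4 matches the buffer size. verify() copies the image
// back into a host-visible buffer and compares it byte by byte against the
// reference.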
2452 class BufferCopyToImage : public CmdCommand
2453 {
2454 public:
2455 									BufferCopyToImage	(void) {}
2456 									~BufferCopyToImage	(void) {}
2457 	const char*						getName				(void) const { return "BufferCopyToImage"; }
2458 
2459 	void							logPrepare			(TestLog& log, size_t commandIndex) const;
2460 	void							prepare				(PrepareContext& context);
2461 	void							logSubmit			(TestLog& log, size_t commandIndex) const;
2462 	void							submit				(SubmitContext& context);
2463 	void							verify				(VerifyContext& context, size_t commandIndex);
2464 
2465 private:
2466 	deInt32							m_imageWidth;
2467 	deInt32							m_imageHeight;
2468 	vk::Move<vk::VkImage>			m_dstImage;
2469 	vk::Move<vk::VkDeviceMemory>	m_memory;
2470 };
2471 
2472 void BufferCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
2473 {
2474 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for buffer to image copy." << TestLog::EndMessage;
2475 }
2476 
2477 void BufferCopyToImage::prepare (PrepareContext& context)
2478 {
2479 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
2480 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
2481 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
2482 	const vk::VkDevice				device			= context.getContext().getDevice();
2483 	const vk::VkQueue				queue			= context.getContext().getQueue();
2484 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
2485 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
2486 	const IVec2						imageSize		= findImageSizeWxHx4(context.getBufferSize());
2487 
2488 	m_imageWidth	= imageSize[0];
2489 	m_imageHeight	= imageSize[1];
2490 
2491 	{
2492 		const vk::VkImageCreateInfo	createInfo =
2493 		{
2494 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2495 			DE_NULL,
2496 
2497 			0,
2498 			vk::VK_IMAGE_TYPE_2D,
2499 			vk::VK_FORMAT_R8G8B8A8_UNORM,
2500 			{
2501 				(deUint32)m_imageWidth,
2502 				(deUint32)m_imageHeight,
2503 				1u,
2504 			},
2505 			1, 1, // mipLevels, arrayLayers
2506 			vk::VK_SAMPLE_COUNT_1_BIT,
2507 
2508 			vk::VK_IMAGE_TILING_OPTIMAL,
2509 			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2510 			vk::VK_SHARING_MODE_EXCLUSIVE,
2511 
2512 			(deUint32)queueFamilies.size(),
2513 			&queueFamilies[0],
2514 			vk::VK_IMAGE_LAYOUT_UNDEFINED
2515 		};
2516 
2517 		m_dstImage = vk::createImage(vkd, device, &createInfo);
2518 	}
2519 
2520 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
2521 
2522 	{
2523 		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2524 		const vk::VkImageMemoryBarrier			barrier			=
2525 		{
2526 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2527 			DE_NULL,
2528 
2529 			0,
2530 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2531 
2532 			vk::VK_IMAGE_LAYOUT_UNDEFINED,
2533 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2534 
2535 			VK_QUEUE_FAMILY_IGNORED,
2536 			VK_QUEUE_FAMILY_IGNORED,
2537 
2538 			*m_dstImage,
2539 			{
2540 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
2541 				0,	// Mip level
2542 				1,	// Mip level count
2543 				0,	// Layer
2544 				1	// Layer count
2545 			}
2546 		};
2547 
2548 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
2549 
2550 		endCommandBuffer(vkd, *commandBuffer);
2551 		submitCommandsAndWait(vkd, device, queue, *commandBuffer);
2552 	}
2553 }
2554 
2555 void BufferCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
2556 {
2557 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer to image" << TestLog::EndMessage;
2558 }
2559 
2560 void BufferCopyToImage::submit (SubmitContext& context)
2561 {
2562 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
2563 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
2564 	const vk::VkBufferImageCopy	region			=
2565 	{
2566 		0,
2567 		0, 0,
2568 		{
2569 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
2570 			0,	// mipLevel
2571 			0,	// arrayLayer
2572 			1	// layerCount
2573 		},
2574 		{ 0, 0, 0 },
2575 		{
2576 			(deUint32)m_imageWidth,
2577 			(deUint32)m_imageHeight,
2578 			1u
2579 		}
2580 	};
2581 
2582 	vkd.cmdCopyBufferToImage(commandBuffer, context.getBuffer(), *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
2583 }
2584 
2585 void BufferCopyToImage::verify (VerifyContext& context, size_t commandIndex)
2586 {
2587 	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
2588 	ReferenceMemory&						reference		(context.getReference());
2589 	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
2590 	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
2591 	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
2592 	const vk::VkDevice						device			= context.getContext().getDevice();
2593 	const vk::VkQueue						queue			= context.getContext().getQueue();
2594 	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
2595 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2596 	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
2597 	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2598 	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
2599 	{
2600 		const vk::VkImageMemoryBarrier		imageBarrier	=
2601 		{
2602 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2603 			DE_NULL,
2604 
2605 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2606 			vk::VK_ACCESS_TRANSFER_READ_BIT,
2607 
2608 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2609 			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2610 
2611 			VK_QUEUE_FAMILY_IGNORED,
2612 			VK_QUEUE_FAMILY_IGNORED,
2613 
2614 			*m_dstImage,
2615 			{
2616 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
2617 				0,	// Mip level
2618 				1,	// Mip level count
2619 				0,	// Layer
2620 				1	// Layer count
2621 			}
2622 		};
2623 		const vk::VkBufferMemoryBarrier bufferBarrier =
2624 		{
2625 			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2626 			DE_NULL,
2627 
2628 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2629 			vk::VK_ACCESS_HOST_READ_BIT,
2630 
2631 			VK_QUEUE_FAMILY_IGNORED,
2632 			VK_QUEUE_FAMILY_IGNORED,
2633 			*dstBuffer,
2634 			0,
2635 			VK_WHOLE_SIZE
2636 		};
2637 
2638 		const vk::VkBufferImageCopy	region =
2639 		{
2640 			0,
2641 			0, 0,
2642 			{
2643 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
2644 				0,	// mipLevel
2645 				0,	// arrayLayer
2646 				1	// layerCount
2647 			},
2648 			{ 0, 0, 0 },
2649 			{
2650 				(deUint32)m_imageWidth,
2651 				(deUint32)m_imageHeight,
2652 				1u
2653 			}
2654 		};
2655 
2656 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
2657 		vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
2658 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2659 	}
2660 
2661 	endCommandBuffer(vkd, *commandBuffer);
2662 	submitCommandsAndWait(vkd, device, queue, *commandBuffer);
2663 
2664 	{
2665 		void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2666 
2667 		vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_imageWidth * m_imageHeight);
2668 
2669 		{
2670 			const deUint8* const	data = (const deUint8*)ptr;
2671 
2672 			for (size_t pos = 0; pos < (size_t)( 4 * m_imageWidth * m_imageHeight); pos++)
2673 			{
2674 				if (reference.isDefined(pos))
2675 				{
2676 					if (data[pos] != reference.get(pos))
2677 					{
2678 						resultCollector.fail(
2679 								de::toString(commandIndex) + ":" + getName()
2680 								+ " Result differs from reference, Expected: "
2681 								+ de::toString(tcu::toHex<8>(reference.get(pos)))
2682 								+ ", Got: "
2683 								+ de::toString(tcu::toHex<8>(data[pos]))
2684 								+ ", At offset: "
2685 								+ de::toString(pos));
2686 						break;
2687 					}
2688 				}
2689 			}
2690 		}
2691 
2692 		vkd.unmapMemory(device, *memory);
2693 	}
2694 }
2695 
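// prepare() creates a source image, uploads seeded pseudorandom texels through a
// staging buffer and transitions it to TRANSFER_SRC_OPTIMAL; submit() then copies
// the image contents over the test buffer, and verify() replays the RNG into the
// reference.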
2696 class BufferCopyFromImage : public CmdCommand
2697 {
2698 public:
2699 									BufferCopyFromImage		(deUint32 seed) : m_seed(seed) {}
2700 									~BufferCopyFromImage	(void) {}
2701 	const char*						getName					(void) const { return "BufferCopyFromImage"; }
2702 
2703 	void							logPrepare				(TestLog& log, size_t commandIndex) const;
2704 	void							prepare					(PrepareContext& context);
2705 	void							logSubmit				(TestLog& log, size_t commandIndex) const;
2706 	void							submit					(SubmitContext& context);
2707 	void							verify					(VerifyContext& context, size_t commandIndex);
2708 
2709 private:
2710 	const deUint32					m_seed;
2711 	deInt32							m_imageWidth;
2712 	deInt32							m_imageHeight;
2713 	vk::Move<vk::VkImage>			m_srcImage;
2714 	vk::Move<vk::VkDeviceMemory>	m_memory;
2715 };
2716 
2717 void BufferCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
2718 {
2719 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to buffer copy." << TestLog::EndMessage;
2720 }
2721 
2722 void BufferCopyFromImage::prepare (PrepareContext& context)
2723 {
2724 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
2725 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
2726 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
2727 	const vk::VkDevice				device			= context.getContext().getDevice();
2728 	const vk::VkQueue				queue			= context.getContext().getQueue();
2729 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
2730 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
2731 	const IVec2						imageSize		= findImageSizeWxHx4(context.getBufferSize());
2732 
2733 	m_imageWidth	= imageSize[0];
2734 	m_imageHeight	= imageSize[1];
2735 
2736 	{
2737 		const vk::VkImageCreateInfo	createInfo =
2738 		{
2739 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2740 			DE_NULL,
2741 
2742 			0,
2743 			vk::VK_IMAGE_TYPE_2D,
2744 			vk::VK_FORMAT_R8G8B8A8_UNORM,
2745 			{
2746 				(deUint32)m_imageWidth,
2747 				(deUint32)m_imageHeight,
2748 				1u,
2749 			},
2750 			1, 1, // mipLevels, arrayLayers
2751 			vk::VK_SAMPLE_COUNT_1_BIT,
2752 
2753 			vk::VK_IMAGE_TILING_OPTIMAL,
2754 			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
2755 			vk::VK_SHARING_MODE_EXCLUSIVE,
2756 
2757 			(deUint32)queueFamilies.size(),
2758 			&queueFamilies[0],
2759 			vk::VK_IMAGE_LAYOUT_UNDEFINED
2760 		};
2761 
2762 		m_srcImage = vk::createImage(vkd, device, &createInfo);
2763 	}
2764 
2765 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
2766 
2767 	{
2768 		const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
2769 		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
2770 		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2771 		const vk::VkImageMemoryBarrier			preImageBarrier	=
2772 		{
2773 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2774 			DE_NULL,
2775 
2776 			0,
2777 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2778 
2779 			vk::VK_IMAGE_LAYOUT_UNDEFINED,
2780 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2781 
2782 			VK_QUEUE_FAMILY_IGNORED,
2783 			VK_QUEUE_FAMILY_IGNORED,
2784 
2785 			*m_srcImage,
2786 			{
2787 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
2788 				0,	// Mip level
2789 				1,	// Mip level count
2790 				0,	// Layer
2791 				1	// Layer count
2792 			}
2793 		};
2794 		const vk::VkImageMemoryBarrier			postImageBarrier =
2795 		{
2796 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
2797 			DE_NULL,
2798 
2799 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2800 			0,
2801 
2802 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
2803 			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
2804 
2805 			VK_QUEUE_FAMILY_IGNORED,
2806 			VK_QUEUE_FAMILY_IGNORED,
2807 
2808 			*m_srcImage,
2809 			{
2810 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
2811 				0,	// Mip level
2812 				1,	// Mip level count
2813 				0,	// Layer
2814 				1	// Layer count
2815 			}
2816 		};
2817 		const vk::VkBufferImageCopy				region				=
2818 		{
2819 			0,
2820 			0, 0,
2821 			{
2822 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
2823 				0,	// mipLevel
2824 				0,	// arrayLayer
2825 				1	// layerCount
2826 			},
2827 			{ 0, 0, 0 },
2828 			{
2829 				(deUint32)m_imageWidth,
2830 				(deUint32)m_imageHeight,
2831 				1u
2832 			}
2833 		};
2834 
2835 		{
2836 			void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
2837 			de::Random	rng	(m_seed);
2838 
2839 			{
2840 				deUint8* const	data = (deUint8*)ptr;
2841 
2842 				for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2843 					data[ndx] = rng.getUint8();
2844 			}
2845 
2846 			vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
2847 			vkd.unmapMemory(device, *memory);
2848 		}
2849 
2850 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
2851 		vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
2852 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
2853 
2854 		endCommandBuffer(vkd, *commandBuffer);
2855 		submitCommandsAndWait(vkd, device, queue, *commandBuffer);
2856 	}
2857 }
2858 
2859 void BufferCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
2860 {
2861 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy buffer data from image" << TestLog::EndMessage;
2862 }
2863 
2864 void BufferCopyFromImage::submit (SubmitContext& context)
2865 {
2866 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
2867 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
2868 	const vk::VkBufferImageCopy	region			=
2869 	{
2870 		0,
2871 		0, 0,
2872 		{
2873 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
2874 			0,	// mipLevel
2875 			0,	// arrayLayer
2876 			1	// layerCount
2877 		},
2878 		{ 0, 0, 0 },
2879 		{
2880 			(deUint32)m_imageWidth,
2881 			(deUint32)m_imageHeight,
2882 			1u
2883 		}
2884 	};
2885 
2886 	vkd.cmdCopyImageToBuffer(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getBuffer(), 1, &region);
2887 }
2888 
2889 void BufferCopyFromImage::verify (VerifyContext& context, size_t)
2890 {
2891 	ReferenceMemory&	reference		(context.getReference());
2892 	de::Random			rng	(m_seed);
2893 
2894 	for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
2895 		reference.set(ndx, rng.getUint8());
2896 }
2897 
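// Copies the test image (expected to be in the layout given at construction) into
// a host-visible buffer and compares the readback against the reference image.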
2898 class ImageCopyToBuffer : public CmdCommand
2899 {
2900 public:
2901 									ImageCopyToBuffer	(vk::VkImageLayout imageLayout) : m_imageLayout (imageLayout) {}
2902 									~ImageCopyToBuffer	(void) {}
2903 	const char*						getName				(void) const { return "ImageCopyToBuffer"; }
2904 
2905 	void							logPrepare			(TestLog& log, size_t commandIndex) const;
2906 	void							prepare				(PrepareContext& context);
2907 	void							logSubmit			(TestLog& log, size_t commandIndex) const;
2908 	void							submit				(SubmitContext& context);
2909 	void							verify				(VerifyContext& context, size_t commandIndex);
2910 
2911 private:
2912 	vk::VkImageLayout				m_imageLayout;
2913 	vk::VkDeviceSize				m_bufferSize;
2914 	vk::Move<vk::VkBuffer>			m_dstBuffer;
2915 	vk::Move<vk::VkDeviceMemory>	m_memory;
2916 	vk::VkDeviceSize				m_imageMemorySize;
2917 	deInt32							m_imageWidth;
2918 	deInt32							m_imageHeight;
2919 };
2920 
2921 void ImageCopyToBuffer::logPrepare (TestLog& log, size_t commandIndex) const
2922 {
2923 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination buffer for image to buffer copy." << TestLog::EndMessage;
2924 }
2925 
2926 void ImageCopyToBuffer::prepare (PrepareContext& context)
2927 {
2928 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
2929 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
2930 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
2931 	const vk::VkDevice				device			= context.getContext().getDevice();
2932 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
2933 
2934 	m_imageWidth		= context.getImageWidth();
2935 	m_imageHeight		= context.getImageHeight();
2936 	m_bufferSize		= 4 * m_imageWidth * m_imageHeight;
2937 	m_imageMemorySize	= context.getImageMemorySize();
2938 	m_dstBuffer			= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
2939 	m_memory			= bindBufferMemory(vki, vkd, physicalDevice, device, *m_dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
2940 }
2941 
2942 void ImageCopyToBuffer::logSubmit (TestLog& log, size_t commandIndex) const
2943 {
2944 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to buffer" << TestLog::EndMessage;
2945 }
2946 
2947 void ImageCopyToBuffer::submit (SubmitContext& context)
2948 {
2949 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
2950 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
2951 	const vk::VkBufferImageCopy	region			=
2952 	{
2953 		0,
2954 		0, 0,
2955 		{
2956 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
2957 			0,	// mipLevel
2958 			0,	// arrayLayer
2959 			1	// layerCount
2960 		},
2961 		{ 0, 0, 0 },
2962 		{
2963 			(deUint32)m_imageWidth,
2964 			(deUint32)m_imageHeight,
2965 			1u
2966 		}
2967 	};
2968 
2969 	vkd.cmdCopyImageToBuffer(commandBuffer, context.getImage(), m_imageLayout, *m_dstBuffer, 1, &region);
2970 }
2971 
2972 void ImageCopyToBuffer::verify (VerifyContext& context, size_t commandIndex)
2973 {
2974 	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
2975 	ReferenceMemory&						reference		(context.getReference());
2976 	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
2977 	const vk::VkDevice						device			= context.getContext().getDevice();
2978 	const vk::VkQueue						queue			= context.getContext().getQueue();
2979 	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
2980 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2981 	const vk::VkBufferMemoryBarrier			barrier			=
2982 	{
2983 		vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
2984 		DE_NULL,
2985 
2986 		vk::VK_ACCESS_TRANSFER_WRITE_BIT,
2987 		vk::VK_ACCESS_HOST_READ_BIT,
2988 
2989 		VK_QUEUE_FAMILY_IGNORED,
2990 		VK_QUEUE_FAMILY_IGNORED,
2991 		*m_dstBuffer,
2992 		0,
2993 		VK_WHOLE_SIZE
2994 	};
2995 
2996 	vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &barrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
2997 
2998 	endCommandBuffer(vkd, *commandBuffer);
2999 	submitCommandsAndWait(vkd, device, queue, *commandBuffer);
3000 
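	// The raw layout of the image's backing memory is not defined by this copy, so mark the reference
	// memory undefined and check correctness by comparing the copied pixels against the reference image.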
3001 	reference.setUndefined(0, (size_t)m_imageMemorySize);
3002 	{
3003 		void* const						ptr				= mapMemory(vkd, device, *m_memory, m_bufferSize);
3004 		const ConstPixelBufferAccess	referenceImage	(context.getReferenceImage().getAccess());
3005 		const ConstPixelBufferAccess	resultImage		(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, ptr);
3006 
3007 		vk::invalidateMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3008 
3009 		if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), referenceImage, resultImage, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3010 			resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3011 
3012 		vkd.unmapMemory(device, *m_memory);
3013 	}
3014 }
3015 
3016 class ImageCopyFromBuffer : public CmdCommand
3017 {
3018 public:
3019 									ImageCopyFromBuffer		(deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
3020 									~ImageCopyFromBuffer	(void) {}
3021 	const char*						getName					(void) const { return "ImageCopyFromBuffer"; }
3022 
3023 	void							logPrepare				(TestLog& log, size_t commandIndex) const;
3024 	void							prepare					(PrepareContext& context);
3025 	void							logSubmit				(TestLog& log, size_t commandIndex) const;
3026 	void							submit					(SubmitContext& context);
3027 	void							verify					(VerifyContext& context, size_t commandIndex);
3028 
3029 private:
3030 	const deUint32					m_seed;
3031 	const vk::VkImageLayout			m_imageLayout;
3032 	deInt32							m_imageWidth;
3033 	deInt32							m_imageHeight;
3034 	vk::VkDeviceSize				m_imageMemorySize;
3035 	vk::VkDeviceSize				m_bufferSize;
3036 	vk::Move<vk::VkBuffer>			m_srcBuffer;
3037 	vk::Move<vk::VkDeviceMemory>	m_memory;
3038 };
3039 
3040 void ImageCopyFromBuffer::logPrepare (TestLog& log, size_t commandIndex) const
3041 {
3042 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source buffer for buffer to image copy. Seed: " << m_seed << TestLog::EndMessage;
3043 }
3044 
3045 void ImageCopyFromBuffer::prepare (PrepareContext& context)
3046 {
3047 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3048 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3049 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3050 	const vk::VkDevice				device			= context.getContext().getDevice();
3051 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3052 
3053 	m_imageWidth		= context.getImageWidth();
3054 	m_imageHeight		= context.getImageHeight();
3055 	m_imageMemorySize	= context.getImageMemorySize();
3056 	m_bufferSize		= m_imageWidth * m_imageHeight * 4;
3057 	m_srcBuffer			= createBuffer(vkd, device, m_bufferSize, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies);
3058 	m_memory			= bindBufferMemory(vki, vkd, physicalDevice, device, *m_srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
3059 
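	// Fill the source buffer with pseudo-random bytes derived from m_seed; verify() regenerates the
	// same sequence to update the reference image.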
3060 	{
3061 		void* const	ptr	= mapMemory(vkd, device, *m_memory, m_bufferSize);
3062 		de::Random	rng	(m_seed);
3063 
3064 		{
3065 			deUint8* const	data = (deUint8*)ptr;
3066 
3067 			for (size_t ndx = 0; ndx < (size_t)m_bufferSize; ndx++)
3068 				data[ndx] = rng.getUint8();
3069 		}
3070 
3071 		vk::flushMappedMemoryRange(vkd, device, *m_memory, 0, m_bufferSize);
3072 		vkd.unmapMemory(device, *m_memory);
3073 	}
3074 }
3075 
3076 void ImageCopyFromBuffer::logSubmit (TestLog& log, size_t commandIndex) const
3077 {
3078 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from buffer" << TestLog::EndMessage;
3079 }
3080 
3081 void ImageCopyFromBuffer::submit (SubmitContext& context)
3082 {
3083 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
3084 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
3085 	const vk::VkBufferImageCopy	region			=
3086 	{
3087 		0,
3088 		0, 0,
3089 		{
3090 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3091 			0,	// mipLevel
3092 			0,	// arrayLayer
3093 			1	// layerCount
3094 		},
3095 		{ 0, 0, 0 },
3096 		{
3097 			(deUint32)m_imageWidth,
3098 			(deUint32)m_imageHeight,
3099 			1u
3100 		}
3101 	};
3102 
3103 	vkd.cmdCopyBufferToImage(commandBuffer, *m_srcBuffer, context.getImage(), m_imageLayout, 1, &region);
3104 }
3105 
3106 void ImageCopyFromBuffer::verify (VerifyContext& context, size_t)
3107 {
3108 	ReferenceMemory&	reference	(context.getReference());
3109 	de::Random			rng			(m_seed);
3110 
3111 	reference.setUndefined(0, (size_t)m_imageMemorySize);
3112 
3113 	{
3114 		const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
3115 
3116 		for (deInt32 y = 0; y < m_imageHeight; y++)
3117 		for (deInt32 x = 0; x < m_imageWidth; x++)
3118 		{
3119 			const deUint8 r8 = rng.getUint8();
3120 			const deUint8 g8 = rng.getUint8();
3121 			const deUint8 b8 = rng.getUint8();
3122 			const deUint8 a8 = rng.getUint8();
3123 
3124 			refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3125 		}
3126 	}
3127 }
3128 
3129 class ImageCopyFromImage : public CmdCommand
3130 {
3131 public:
3132 									ImageCopyFromImage	(deUint32 seed, vk::VkImageLayout imageLayout) : m_seed(seed), m_imageLayout(imageLayout) {}
3133 									~ImageCopyFromImage	(void) {}
3134 	const char*						getName				(void) const { return "ImageCopyFromImage"; }
3135 
3136 	void							logPrepare			(TestLog& log, size_t commandIndex) const;
3137 	void							prepare				(PrepareContext& context);
3138 	void							logSubmit			(TestLog& log, size_t commandIndex) const;
3139 	void							submit				(SubmitContext& context);
3140 	void							verify				(VerifyContext& context, size_t commandIndex);
3141 
3142 private:
3143 	const deUint32					m_seed;
3144 	const vk::VkImageLayout			m_imageLayout;
3145 	deInt32							m_imageWidth;
3146 	deInt32							m_imageHeight;
3147 	vk::VkDeviceSize				m_imageMemorySize;
3148 	vk::Move<vk::VkImage>			m_srcImage;
3149 	vk::Move<vk::VkDeviceMemory>	m_memory;
3150 };
3151 
3152 void ImageCopyFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3153 {
3154 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image copy." << TestLog::EndMessage;
3155 }
3156 
3157 void ImageCopyFromImage::prepare (PrepareContext& context)
3158 {
3159 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3160 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3161 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3162 	const vk::VkDevice				device			= context.getContext().getDevice();
3163 	const vk::VkQueue				queue			= context.getContext().getQueue();
3164 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
3165 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3166 
3167 	m_imageWidth		= context.getImageWidth();
3168 	m_imageHeight		= context.getImageHeight();
3169 	m_imageMemorySize	= context.getImageMemorySize();
3170 
3171 	{
3172 		const vk::VkImageCreateInfo	createInfo =
3173 		{
3174 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3175 			DE_NULL,
3176 
3177 			0,
3178 			vk::VK_IMAGE_TYPE_2D,
3179 			vk::VK_FORMAT_R8G8B8A8_UNORM,
3180 			{
3181 				(deUint32)m_imageWidth,
3182 				(deUint32)m_imageHeight,
3183 				1u,
3184 			},
3185 			1, 1, // mipLevels, arrayLayers
3186 			vk::VK_SAMPLE_COUNT_1_BIT,
3187 
3188 			vk::VK_IMAGE_TILING_OPTIMAL,
3189 			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3190 			vk::VK_SHARING_MODE_EXCLUSIVE,
3191 
3192 			(deUint32)queueFamilies.size(),
3193 			&queueFamilies[0],
3194 			vk::VK_IMAGE_LAYOUT_UNDEFINED
3195 		};
3196 
3197 		m_srcImage = vk::createImage(vkd, device, &createInfo);
3198 	}
3199 
3200 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
3201 
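	// Upload seeded pseudo-random content into the source image through a temporary staging buffer,
	// transitioning it UNDEFINED -> TRANSFER_DST_OPTIMAL -> TRANSFER_SRC_OPTIMAL.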
3202 	{
3203 		const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3204 		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3205 		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3206 		const vk::VkImageMemoryBarrier			preImageBarrier	=
3207 		{
3208 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3209 			DE_NULL,
3210 
3211 			0,
3212 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3213 
3214 			vk::VK_IMAGE_LAYOUT_UNDEFINED,
3215 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3216 
3217 			VK_QUEUE_FAMILY_IGNORED,
3218 			VK_QUEUE_FAMILY_IGNORED,
3219 
3220 			*m_srcImage,
3221 			{
3222 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3223 				0,	// Mip level
3224 				1,	// Mip level count
3225 				0,	// Layer
3226 				1	// Layer count
3227 			}
3228 		};
3229 		const vk::VkImageMemoryBarrier			postImageBarrier =
3230 		{
3231 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3232 			DE_NULL,
3233 
3234 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3235 			0,
3236 
3237 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3238 			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3239 
3240 			VK_QUEUE_FAMILY_IGNORED,
3241 			VK_QUEUE_FAMILY_IGNORED,
3242 
3243 			*m_srcImage,
3244 			{
3245 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3246 				0,	// Mip level
3247 				1,	// Mip level count
3248 				0,	// Layer
3249 				1	// Layer count
3250 			}
3251 		};
3252 		const vk::VkBufferImageCopy				region				=
3253 		{
3254 			0,
3255 			0, 0,
3256 			{
3257 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3258 				0,	// mipLevel
3259 				0,	// arrayLayer
3260 				1	// layerCount
3261 			},
3262 			{ 0, 0, 0 },
3263 			{
3264 				(deUint32)m_imageWidth,
3265 				(deUint32)m_imageHeight,
3266 				1u
3267 			}
3268 		};
3269 
3270 		{
3271 			void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3272 			de::Random	rng	(m_seed);
3273 
3274 			{
3275 				deUint8* const	data = (deUint8*)ptr;
3276 
3277 				for (size_t ndx = 0; ndx < (size_t)(4 * m_imageWidth * m_imageHeight); ndx++)
3278 					data[ndx] = rng.getUint8();
3279 			}
3280 
3281 			vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_imageWidth * m_imageHeight);
3282 			vkd.unmapMemory(device, *memory);
3283 		}
3284 
3285 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
3286 		vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
3287 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3288 
3289 		endCommandBuffer(vkd, *commandBuffer);
3290 		submitCommandsAndWait(vkd, device, queue, *commandBuffer);
3291 	}
3292 }
3293 
3294 void ImageCopyFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3295 {
3296 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image data from another image" << TestLog::EndMessage;
3297 }
3298 
3299 void ImageCopyFromImage::submit (SubmitContext& context)
3300 {
3301 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
3302 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
3303 	const vk::VkImageCopy		region			=
3304 	{
3305 		{
3306 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3307 			0,	// mipLevel
3308 			0,	// arrayLayer
3309 			1	// layerCount
3310 		},
3311 		{ 0, 0, 0 },
3312 
3313 		{
3314 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3315 			0,	// mipLevel
3316 			0,	// arrayLayer
3317 			1	// layerCount
3318 		},
3319 		{ 0, 0, 0 },
3320 		{
3321 			(deUint32)m_imageWidth,
3322 			(deUint32)m_imageHeight,
3323 			1u
3324 		}
3325 	};
3326 
3327 	vkd.cmdCopyImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, &region);
3328 }
3329 
3330 void ImageCopyFromImage::verify (VerifyContext& context, size_t)
3331 {
3332 	ReferenceMemory&	reference	(context.getReference());
3333 	de::Random			rng			(m_seed);
3334 
3335 	reference.setUndefined(0, (size_t)m_imageMemorySize);
3336 
3337 	{
3338 		const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
3339 
3340 		for (deInt32 y = 0; y < m_imageHeight; y++)
3341 		for (deInt32 x = 0; x < m_imageWidth; x++)
3342 		{
3343 			const deUint8 r8 = rng.getUint8();
3344 			const deUint8 g8 = rng.getUint8();
3345 			const deUint8 b8 = rng.getUint8();
3346 			const deUint8 a8 = rng.getUint8();
3347 
3348 			refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3349 		}
3350 	}
3351 }
3352 
3353 class ImageCopyToImage : public CmdCommand
3354 {
3355 public:
3356 									ImageCopyToImage	(vk::VkImageLayout imageLayout) : m_imageLayout(imageLayout) {}
3357 									~ImageCopyToImage	(void) {}
3358 	const char*						getName				(void) const { return "ImageCopyToImage"; }
3359 
3360 	void							logPrepare			(TestLog& log, size_t commandIndex) const;
3361 	void							prepare				(PrepareContext& context);
3362 	void							logSubmit			(TestLog& log, size_t commandIndex) const;
3363 	void							submit				(SubmitContext& context);
3364 	void							verify				(VerifyContext& context, size_t commandIndex);
3365 
3366 private:
3367 	const vk::VkImageLayout			m_imageLayout;
3368 	deInt32							m_imageWidth;
3369 	deInt32							m_imageHeight;
3370 	vk::VkDeviceSize				m_imageMemorySize;
3371 	vk::Move<vk::VkImage>			m_dstImage;
3372 	vk::Move<vk::VkDeviceMemory>	m_memory;
3373 };
3374 
3375 void ImageCopyToImage::logPrepare (TestLog& log, size_t commandIndex) const
3376 {
3377 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image copy." << TestLog::EndMessage;
3378 }
3379 
3380 void ImageCopyToImage::prepare (PrepareContext& context)
3381 {
3382 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3383 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3384 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3385 	const vk::VkDevice				device			= context.getContext().getDevice();
3386 	const vk::VkQueue				queue			= context.getContext().getQueue();
3387 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
3388 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3389 
3390 	m_imageWidth		= context.getImageWidth();
3391 	m_imageHeight		= context.getImageHeight();
3392 	m_imageMemorySize	= context.getImageMemorySize();
3393 
3394 	{
3395 		const vk::VkImageCreateInfo	createInfo =
3396 		{
3397 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3398 			DE_NULL,
3399 
3400 			0,
3401 			vk::VK_IMAGE_TYPE_2D,
3402 			vk::VK_FORMAT_R8G8B8A8_UNORM,
3403 			{
3404 				(deUint32)m_imageWidth,
3405 				(deUint32)m_imageHeight,
3406 				1u,
3407 			},
3408 			1, 1, // mipLevels, arrayLayers
3409 			vk::VK_SAMPLE_COUNT_1_BIT,
3410 
3411 			vk::VK_IMAGE_TILING_OPTIMAL,
3412 			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3413 			vk::VK_SHARING_MODE_EXCLUSIVE,
3414 
3415 			(deUint32)queueFamilies.size(),
3416 			&queueFamilies[0],
3417 			vk::VK_IMAGE_LAYOUT_UNDEFINED
3418 		};
3419 
3420 		m_dstImage = vk::createImage(vkd, device, &createInfo);
3421 	}
3422 
3423 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
3424 
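	// Transition the destination image from UNDEFINED to TRANSFER_DST_OPTIMAL before it is used as a copy target.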
3425 	{
3426 		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3427 		const vk::VkImageMemoryBarrier			barrier			=
3428 		{
3429 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3430 			DE_NULL,
3431 
3432 			0,
3433 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3434 
3435 			vk::VK_IMAGE_LAYOUT_UNDEFINED,
3436 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3437 
3438 			VK_QUEUE_FAMILY_IGNORED,
3439 			VK_QUEUE_FAMILY_IGNORED,
3440 
3441 			*m_dstImage,
3442 			{
3443 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3444 				0,	// Mip level
3445 				1,	// Mip level count
3446 				0,	// Layer
3447 				1	// Layer count
3448 			}
3449 		};
3450 
3451 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
3452 
3453 		endCommandBuffer(vkd, *commandBuffer);
3454 		submitCommandsAndWait(vkd, device, queue, *commandBuffer);
3455 	}
3456 }
3457 
3458 void ImageCopyToImage::logSubmit (TestLog& log, size_t commandIndex) const
3459 {
3460 	log << TestLog::Message << commandIndex << ":" << getName() << " Copy image to another image" << TestLog::EndMessage;
3461 }
3462 
3463 void ImageCopyToImage::submit (SubmitContext& context)
3464 {
3465 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
3466 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
3467 	const vk::VkImageCopy		region			=
3468 	{
3469 		{
3470 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3471 			0,	// mipLevel
3472 			0,	// arrayLayer
3473 			1	// layerCount
3474 		},
3475 		{ 0, 0, 0 },
3476 
3477 		{
3478 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3479 			0,	// mipLevel
3480 			0,	// arrayLayer
3481 			1	// layerCount
3482 		},
3483 		{ 0, 0, 0 },
3484 		{
3485 			(deUint32)m_imageWidth,
3486 			(deUint32)m_imageHeight,
3487 			1u
3488 		}
3489 	};
3490 
3491 	vkd.cmdCopyImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
3492 }
3493 
3494 void ImageCopyToImage::verify (VerifyContext& context, size_t commandIndex)
3495 {
3496 	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
3497 	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
3498 	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
3499 	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
3500 	const vk::VkDevice						device			= context.getContext().getDevice();
3501 	const vk::VkQueue						queue			= context.getContext().getQueue();
3502 	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
3503 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3504 	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
3505 	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_imageWidth * m_imageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3506 	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3507 	{
3508 		const vk::VkImageMemoryBarrier		imageBarrier	=
3509 		{
3510 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3511 			DE_NULL,
3512 
3513 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3514 			vk::VK_ACCESS_TRANSFER_READ_BIT,
3515 
3516 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3517 			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3518 
3519 			VK_QUEUE_FAMILY_IGNORED,
3520 			VK_QUEUE_FAMILY_IGNORED,
3521 
3522 			*m_dstImage,
3523 			{
3524 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3525 				0,	// Mip level
3526 				1,	// Mip level count
3527 				0,	// Layer
3528 				1	// Layer count
3529 			}
3530 		};
3531 		const vk::VkBufferMemoryBarrier bufferBarrier =
3532 		{
3533 			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
3534 			DE_NULL,
3535 
3536 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3537 			vk::VK_ACCESS_HOST_READ_BIT,
3538 
3539 			VK_QUEUE_FAMILY_IGNORED,
3540 			VK_QUEUE_FAMILY_IGNORED,
3541 			*dstBuffer,
3542 			0,
3543 			VK_WHOLE_SIZE
3544 		};
3545 		const vk::VkBufferImageCopy	region =
3546 		{
3547 			0,
3548 			0, 0,
3549 			{
3550 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3551 				0,	// mipLevel
3552 				0,	// arrayLayer
3553 				1	// layerCount
3554 			},
3555 			{ 0, 0, 0 },
3556 			{
3557 				(deUint32)m_imageWidth,
3558 				(deUint32)m_imageHeight,
3559 				1u
3560 			}
3561 		};
3562 
3563 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
3564 		vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
3565 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
3566 	}
3567 
3568 	endCommandBuffer(vkd, *commandBuffer);
3569 	submitCommandsAndWait(vkd, device, queue, *commandBuffer);
3570 
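	// Map the readback buffer, invalidate the host-visible range and compare the copied pixels
	// against the reference image.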
3571 	{
3572 		void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_imageWidth * m_imageHeight);
3573 
3574 		vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_imageWidth * m_imageHeight);
3575 
3576 		{
3577 			const deUint8* const			data		= (const deUint8*)ptr;
3578 			const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_imageWidth, m_imageHeight, 1, data);
3579 			const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
3580 
3581 			if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
3582 				resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
3583 		}
3584 
3585 		vkd.unmapMemory(device, *memory);
3586 	}
3587 }
3588 
3589 enum BlitScale
3590 {
3591 	BLIT_SCALE_20,
3592 	BLIT_SCALE_10,
3593 };
3594 
3595 class ImageBlitFromImage : public CmdCommand
3596 {
3597 public:
3598 									ImageBlitFromImage	(deUint32 seed, BlitScale scale, vk::VkImageLayout imageLayout) : m_seed(seed), m_scale(scale), m_imageLayout(imageLayout) {}
3599 									~ImageBlitFromImage	(void) {}
3600 	const char*						getName				(void) const { return "ImageBlitFromImage"; }
3601 
3602 	void							logPrepare			(TestLog& log, size_t commandIndex) const;
3603 	void							prepare				(PrepareContext& context);
3604 	void							logSubmit			(TestLog& log, size_t commandIndex) const;
3605 	void							submit				(SubmitContext& context);
3606 	void							verify				(VerifyContext& context, size_t commandIndex);
3607 
3608 private:
3609 	const deUint32					m_seed;
3610 	const BlitScale					m_scale;
3611 	const vk::VkImageLayout			m_imageLayout;
3612 	deInt32							m_imageWidth;
3613 	deInt32							m_imageHeight;
3614 	vk::VkDeviceSize				m_imageMemorySize;
3615 	deInt32							m_srcImageWidth;
3616 	deInt32							m_srcImageHeight;
3617 	vk::Move<vk::VkImage>			m_srcImage;
3618 	vk::Move<vk::VkDeviceMemory>	m_memory;
3619 };
3620 
3621 void ImageBlitFromImage::logPrepare (TestLog& log, size_t commandIndex) const
3622 {
3623 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate source image for image to image blit." << TestLog::EndMessage;
3624 }
3625 
3626 void ImageBlitFromImage::prepare (PrepareContext& context)
3627 {
3628 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3629 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3630 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3631 	const vk::VkDevice				device			= context.getContext().getDevice();
3632 	const vk::VkQueue				queue			= context.getContext().getQueue();
3633 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
3634 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3635 
3636 	m_imageWidth		= context.getImageWidth();
3637 	m_imageHeight		= context.getImageHeight();
3638 	m_imageMemorySize	= context.getImageMemorySize();
3639 
3640 	if (m_scale == BLIT_SCALE_10)
3641 	{
3642 		m_srcImageWidth			= m_imageWidth;
3643 		m_srcImageHeight		= m_imageHeight;
3644 	}
3645 	else if (m_scale == BLIT_SCALE_20)
3646 	{
3647 		m_srcImageWidth			= m_imageWidth / 2;
3648 		m_srcImageHeight		= m_imageHeight / 2;
3649 	}
3650 	else
3651 		DE_FATAL("Unsupported scale");
3652 
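	// For BLIT_SCALE_20 the source image is created at half the target size, so the blit magnifies
	// by 2x; for BLIT_SCALE_10 the blit is a 1:1 copy.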
3653 	{
3654 		const vk::VkImageCreateInfo	createInfo =
3655 		{
3656 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3657 			DE_NULL,
3658 
3659 			0,
3660 			vk::VK_IMAGE_TYPE_2D,
3661 			vk::VK_FORMAT_R8G8B8A8_UNORM,
3662 			{
3663 				(deUint32)m_srcImageWidth,
3664 				(deUint32)m_srcImageHeight,
3665 				1u,
3666 			},
3667 			1, 1, // mipLevels, arrayLayers
3668 			vk::VK_SAMPLE_COUNT_1_BIT,
3669 
3670 			vk::VK_IMAGE_TILING_OPTIMAL,
3671 			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3672 			vk::VK_SHARING_MODE_EXCLUSIVE,
3673 
3674 			(deUint32)queueFamilies.size(),
3675 			&queueFamilies[0],
3676 			vk::VK_IMAGE_LAYOUT_UNDEFINED
3677 		};
3678 
3679 		m_srcImage = vk::createImage(vkd, device, &createInfo);
3680 	}
3681 
3682 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_srcImage, 0);
3683 
3684 	{
3685 		const vk::Unique<vk::VkBuffer>			srcBuffer		(createBuffer(vkd, device, 4 * m_srcImageWidth * m_srcImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_SRC_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
3686 		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *srcBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
3687 		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3688 		const vk::VkImageMemoryBarrier			preImageBarrier	=
3689 		{
3690 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3691 			DE_NULL,
3692 
3693 			0,
3694 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3695 
3696 			vk::VK_IMAGE_LAYOUT_UNDEFINED,
3697 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3698 
3699 			VK_QUEUE_FAMILY_IGNORED,
3700 			VK_QUEUE_FAMILY_IGNORED,
3701 
3702 			*m_srcImage,
3703 			{
3704 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3705 				0,	// Mip level
3706 				1,	// Mip level count
3707 				0,	// Layer
3708 				1	// Layer count
3709 			}
3710 		};
3711 		const vk::VkImageMemoryBarrier			postImageBarrier =
3712 		{
3713 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3714 			DE_NULL,
3715 
3716 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3717 			0,
3718 
3719 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3720 			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
3721 
3722 			VK_QUEUE_FAMILY_IGNORED,
3723 			VK_QUEUE_FAMILY_IGNORED,
3724 
3725 			*m_srcImage,
3726 			{
3727 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3728 				0,	// Mip level
3729 				1,	// Mip level count
3730 				0,	// Layer
3731 				1	// Layer count
3732 			}
3733 		};
3734 		const vk::VkBufferImageCopy				region				=
3735 		{
3736 			0,
3737 			0, 0,
3738 			{
3739 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3740 				0,	// mipLevel
3741 				0,	// arrayLayer
3742 				1	// layerCount
3743 			},
3744 			{ 0, 0, 0 },
3745 			{
3746 				(deUint32)m_srcImageWidth,
3747 				(deUint32)m_srcImageHeight,
3748 				1u
3749 			}
3750 		};
3751 
3752 		{
3753 			void* const	ptr	= mapMemory(vkd, device, *memory, 4 * m_srcImageWidth * m_srcImageHeight);
3754 			de::Random	rng	(m_seed);
3755 
3756 			{
3757 				deUint8* const	data = (deUint8*)ptr;
3758 
3759 				for (size_t ndx = 0; ndx < (size_t)(4 * m_srcImageWidth * m_srcImageHeight); ndx++)
3760 					data[ndx] = rng.getUint8();
3761 			}
3762 
3763 			vk::flushMappedMemoryRange(vkd, device, *memory, 0, 4 * m_srcImageWidth * m_srcImageHeight);
3764 			vkd.unmapMemory(device, *memory);
3765 		}
3766 
3767 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &preImageBarrier);
3768 		vkd.cmdCopyBufferToImage(*commandBuffer, *srcBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);
3769 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &postImageBarrier);
3770 
3771 		endCommandBuffer(vkd, *commandBuffer);
3772 		submitCommandsAndWait(vkd, device, queue, *commandBuffer);
3773 	}
3774 }
3775 
3776 void ImageBlitFromImage::logSubmit (TestLog& log, size_t commandIndex) const
3777 {
3778 	log << TestLog::Message << commandIndex << ":" << getName() << " Blit from another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
3779 }
3780 
3781 void ImageBlitFromImage::submit (SubmitContext& context)
3782 {
3783 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
3784 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
3785 	const vk::VkImageBlit		region			=
3786 	{
3787 		// Src
3788 		{
3789 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3790 			0,	// mipLevel
3791 			0,	// arrayLayer
3792 			1	// layerCount
3793 		},
3794 		{
3795 			{ 0, 0, 0 },
3796 			{
3797 				m_srcImageWidth,
3798 				m_srcImageHeight,
3799 				1
3800 			},
3801 		},
3802 
3803 		// Dst
3804 		{
3805 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
3806 			0,	// mipLevel
3807 			0,	// arrayLayer
3808 			1	// layerCount
3809 		},
3810 		{
3811 			{ 0, 0, 0 },
3812 			{
3813 				m_imageWidth,
3814 				m_imageHeight,
3815 				1u
3816 			}
3817 		}
3818 	};
3819 	vkd.cmdBlitImage(commandBuffer, *m_srcImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, context.getImage(), m_imageLayout, 1, &region, vk::VK_FILTER_NEAREST);
3820 }
3821 
3822 void ImageBlitFromImage::verify (VerifyContext& context, size_t)
3823 {
3824 	ReferenceMemory&	reference	(context.getReference());
3825 	de::Random			rng			(m_seed);
3826 
3827 	reference.setUndefined(0, (size_t)m_imageMemorySize);
3828 
3829 	{
3830 		const PixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
3831 
3832 		if (m_scale == BLIT_SCALE_10)
3833 		{
3834 			for (deInt32 y = 0; y < m_imageHeight; y++)
3835 			for (deInt32 x = 0; x < m_imageWidth; x++)
3836 			{
3837 				const deUint8 r8 = rng.getUint8();
3838 				const deUint8 g8 = rng.getUint8();
3839 				const deUint8 b8 = rng.getUint8();
3840 				const deUint8 a8 = rng.getUint8();
3841 
3842 				refAccess.setPixel(UVec4(r8, g8, b8, a8), x, y);
3843 			}
3844 		}
3845 		else if (m_scale == BLIT_SCALE_20)
3846 		{
3847 			tcu::TextureLevel	source	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_srcImageWidth, m_srcImageHeight);
3848 			const float			xscale	= ((float)m_srcImageWidth)  / (float)m_imageWidth;
3849 			const float			yscale	= ((float)m_srcImageHeight) / (float)m_imageHeight;
3850 
3851 			for (deInt32 y = 0; y < m_srcImageHeight; y++)
3852 			for (deInt32 x = 0; x < m_srcImageWidth; x++)
3853 			{
3854 				const deUint8 r8 = rng.getUint8();
3855 				const deUint8 g8 = rng.getUint8();
3856 				const deUint8 b8 = rng.getUint8();
3857 				const deUint8 a8 = rng.getUint8();
3858 
3859 				source.getAccess().setPixel(UVec4(r8, g8, b8, a8), x, y);
3860 			}
3861 
3862 			for (deInt32 y = 0; y < m_imageHeight; y++)
3863 			for (deInt32 x = 0; x < m_imageWidth; x++)
3864 				refAccess.setPixel(source.getAccess().getPixelUint(int((float(x) + 0.5f) * xscale), int((float(y) + 0.5f) * yscale)), x, y);
3865 		}
3866 		else
3867 			DE_FATAL("Unsupported scale");
3868 	}
3869 }
3870 
3871 class ImageBlitToImage : public CmdCommand
3872 {
3873 public:
3874 									ImageBlitToImage	(BlitScale scale, vk::VkImageLayout imageLayout) : m_scale(scale), m_imageLayout(imageLayout) {}
3875 									~ImageBlitToImage	(void) {}
3876 	const char*						getName				(void) const { return "ImageBlitToImage"; }
3877 
3878 	void							logPrepare			(TestLog& log, size_t commandIndex) const;
3879 	void							prepare				(PrepareContext& context);
3880 	void							logSubmit			(TestLog& log, size_t commandIndex) const;
3881 	void							submit				(SubmitContext& context);
3882 	void							verify				(VerifyContext& context, size_t commandIndex);
3883 
3884 private:
3885 	const BlitScale					m_scale;
3886 	const vk::VkImageLayout			m_imageLayout;
3887 	deInt32							m_imageWidth;
3888 	deInt32							m_imageHeight;
3889 	vk::VkDeviceSize				m_imageMemorySize;
3890 	deInt32							m_dstImageWidth;
3891 	deInt32							m_dstImageHeight;
3892 	vk::Move<vk::VkImage>			m_dstImage;
3893 	vk::Move<vk::VkDeviceMemory>	m_memory;
3894 };
3895 
3896 void ImageBlitToImage::logPrepare (TestLog& log, size_t commandIndex) const
3897 {
3898 	log << TestLog::Message << commandIndex << ":" << getName() << " Allocate destination image for image to image blit." << TestLog::EndMessage;
3899 }
3900 
3901 void ImageBlitToImage::prepare (PrepareContext& context)
3902 {
3903 	const vk::InstanceInterface&	vki				= context.getContext().getInstanceInterface();
3904 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
3905 	const vk::VkPhysicalDevice		physicalDevice	= context.getContext().getPhysicalDevice();
3906 	const vk::VkDevice				device			= context.getContext().getDevice();
3907 	const vk::VkQueue				queue			= context.getContext().getQueue();
3908 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
3909 	const vector<deUint32>&			queueFamilies	= context.getContext().getQueueFamilies();
3910 
3911 	m_imageWidth		= context.getImageWidth();
3912 	m_imageHeight		= context.getImageHeight();
3913 	m_imageMemorySize	= context.getImageMemorySize();
3914 
3915 	if (m_scale == BLIT_SCALE_10)
3916 	{
3917 		m_dstImageWidth		= context.getImageWidth();
3918 		m_dstImageHeight	= context.getImageHeight();
3919 	}
3920 	else if (m_scale == BLIT_SCALE_20)
3921 	{
3922 		m_dstImageWidth		= context.getImageWidth() * 2;
3923 		m_dstImageHeight	= context.getImageHeight() * 2;
3924 	}
3925 	else
3926 		DE_FATAL("Unsupported blit scale");
3927 
3928 	{
3929 		const vk::VkImageCreateInfo	createInfo =
3930 		{
3931 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3932 			DE_NULL,
3933 
3934 			0,
3935 			vk::VK_IMAGE_TYPE_2D,
3936 			vk::VK_FORMAT_R8G8B8A8_UNORM,
3937 			{
3938 				(deUint32)m_dstImageWidth,
3939 				(deUint32)m_dstImageHeight,
3940 				1u,
3941 			},
3942 			1, 1, // mipLevels, arrayLayers
3943 			vk::VK_SAMPLE_COUNT_1_BIT,
3944 
3945 			vk::VK_IMAGE_TILING_OPTIMAL,
3946 			vk::VK_IMAGE_USAGE_TRANSFER_DST_BIT|vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
3947 			vk::VK_SHARING_MODE_EXCLUSIVE,
3948 
3949 			(deUint32)queueFamilies.size(),
3950 			&queueFamilies[0],
3951 			vk::VK_IMAGE_LAYOUT_UNDEFINED
3952 		};
3953 
3954 		m_dstImage = vk::createImage(vkd, device, &createInfo);
3955 	}
3956 
3957 	m_memory = bindImageMemory(vki, vkd, physicalDevice, device, *m_dstImage, 0);
3958 
3959 	{
3960 		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
3961 		const vk::VkImageMemoryBarrier			barrier			=
3962 		{
3963 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
3964 			DE_NULL,
3965 
3966 			0,
3967 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
3968 
3969 			vk::VK_IMAGE_LAYOUT_UNDEFINED,
3970 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
3971 
3972 			VK_QUEUE_FAMILY_IGNORED,
3973 			VK_QUEUE_FAMILY_IGNORED,
3974 
3975 			*m_dstImage,
3976 			{
3977 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
3978 				0,	// Mip level
3979 				1,	// Mip level count
3980 				0,	// Layer
3981 				1	// Layer count
3982 			}
3983 		};
3984 
3985 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &barrier);
3986 
3987 		endCommandBuffer(vkd, *commandBuffer);
3988 		submitCommandsAndWait(vkd, device, queue, *commandBuffer);
3989 	}
3990 }
3991 
3992 void ImageBlitToImage::logSubmit (TestLog& log, size_t commandIndex) const
3993 {
3994 	log << TestLog::Message << commandIndex << ":" << getName() << " Blit image to another image" << (m_scale == BLIT_SCALE_20 ? " scale 2x" : "")  << TestLog::EndMessage;
3995 }
3996 
3997 void ImageBlitToImage::submit (SubmitContext& context)
3998 {
3999 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
4000 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
4001 	const vk::VkImageBlit		region			=
4002 	{
4003 		// Src
4004 		{
4005 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
4006 			0,	// mipLevel
4007 			0,	// arrayLayer
4008 			1	// layerCount
4009 		},
4010 		{
4011 			{ 0, 0, 0 },
4012 			{
4013 				m_imageWidth,
4014 				m_imageHeight,
4015 				1
4016 			},
4017 		},
4018 
4019 		// Dst
4020 		{
4021 			vk::VK_IMAGE_ASPECT_COLOR_BIT,
4022 			0,	// mipLevel
4023 			0,	// arrayLayer
4024 			1	// layerCount
4025 		},
4026 		{
4027 			{ 0, 0, 0 },
4028 			{
4029 				m_dstImageWidth,
4030 				m_dstImageHeight,
4031 				1u
4032 			}
4033 		}
4034 	};
4035 	vkd.cmdBlitImage(commandBuffer, context.getImage(), m_imageLayout, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region, vk::VK_FILTER_NEAREST);
4036 }
4037 
4038 void ImageBlitToImage::verify (VerifyContext& context, size_t commandIndex)
4039 {
4040 	tcu::ResultCollector&					resultCollector	(context.getResultCollector());
4041 	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
4042 	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
4043 	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
4044 	const vk::VkDevice						device			= context.getContext().getDevice();
4045 	const vk::VkQueue						queue			= context.getContext().getQueue();
4046 	const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
4047 	const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4048 	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
4049 	const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_dstImageWidth * m_dstImageHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4050 	const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
4051 	{
4052 		const vk::VkImageMemoryBarrier		imageBarrier	=
4053 		{
4054 			vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4055 			DE_NULL,
4056 
4057 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4058 			vk::VK_ACCESS_TRANSFER_READ_BIT,
4059 
4060 			vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
4061 			vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4062 
4063 			VK_QUEUE_FAMILY_IGNORED,
4064 			VK_QUEUE_FAMILY_IGNORED,
4065 
4066 			*m_dstImage,
4067 			{
4068 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
4069 				0,	// Mip level
4070 				1,	// Mip level count
4071 				0,	// Layer
4072 				1	// Layer count
4073 			}
4074 		};
4075 		const vk::VkBufferMemoryBarrier bufferBarrier =
4076 		{
4077 			vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4078 			DE_NULL,
4079 
4080 			vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4081 			vk::VK_ACCESS_HOST_READ_BIT,
4082 
4083 			VK_QUEUE_FAMILY_IGNORED,
4084 			VK_QUEUE_FAMILY_IGNORED,
4085 			*dstBuffer,
4086 			0,
4087 			VK_WHOLE_SIZE
4088 		};
4089 		const vk::VkBufferImageCopy	region =
4090 		{
4091 			0,
4092 			0, 0,
4093 			{
4094 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
4095 				0,	// mipLevel
4096 				0,	// arrayLayer
4097 				1	// layerCount
4098 			},
4099 			{ 0, 0, 0 },
4100 			{
4101 				(deUint32)m_dstImageWidth,
4102 				(deUint32)m_dstImageHeight,
4103 				1
4104 			}
4105 		};
4106 
4107 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
4108 		vkd.cmdCopyImageToBuffer(*commandBuffer, *m_dstImage, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
4109 		vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4110 	}
4111 
4112 	endCommandBuffer(vkd, *commandBuffer);
4113 	submitCommandsAndWait(vkd, device, queue, *commandBuffer);
4114 
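	// Read the blit destination back through a host-visible buffer. In the 2x case the reference is
	// built by replicating each reference pixel into a 2x2 block, matching VK_FILTER_NEAREST.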
4115 	{
4116 		void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_dstImageWidth * m_dstImageHeight);
4117 
4118 		vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_dstImageWidth * m_dstImageHeight);
4119 
4120 		if (m_scale == BLIT_SCALE_10)
4121 		{
4122 			const deUint8* const			data		= (const deUint8*)ptr;
4123 			const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4124 			const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
4125 
4126 			if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4127 				resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4128 		}
4129 		else if (m_scale == BLIT_SCALE_20)
4130 		{
4131 			const deUint8* const			data		= (const deUint8*)ptr;
4132 			const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1, data);
4133 			tcu::TextureLevel				reference	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_dstImageWidth, m_dstImageHeight, 1);
4134 
4135 			{
4136 				const ConstPixelBufferAccess&	refAccess	(context.getReferenceImage().getAccess());
4137 
4138 				for (deInt32 y = 0; y < m_dstImageHeight; y++)
4139 				for (deInt32 x = 0; x < m_dstImageWidth; x++)
4140 				{
4141 					reference.getAccess().setPixel(refAccess.getPixel(x/2, y/2), x, y);
4142 				}
4143 			}
4144 
4145 			if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), reference.getAccess(), resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4146 				resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4147 		}
4148 		else
4149 			DE_FATAL("Unknown scale");
4150 
4151 		vkd.unmapMemory(device, *memory);
4152 	}
4153 }
4154 
4155 class PrepareRenderPassContext
4156 {
4157 public:
4158 								PrepareRenderPassContext	(PrepareContext&	context,
4159 															 vk::VkRenderPass	renderPass,
4160 															 vk::VkFramebuffer	framebuffer,
4161 															 deInt32			targetWidth,
4162 															 deInt32			targetHeight)
4163 		: m_context			(context)
4164 		, m_renderPass		(renderPass)
4165 		, m_framebuffer		(framebuffer)
4166 		, m_targetWidth		(targetWidth)
4167 		, m_targetHeight	(targetHeight)
4168 	{
4169 	}
4170 
4171 	const Memory&				getMemory					(void) const { return m_context.getMemory(); }
4172 	const Context&				getContext					(void) const { return m_context.getContext(); }
4173 	const vk::BinaryCollection&	getBinaryCollection			(void) const { return m_context.getBinaryCollection(); }
4174 
4175 	vk::VkBuffer				getBuffer					(void) const { return m_context.getBuffer(); }
4176 	vk::VkDeviceSize			getBufferSize				(void) const { return m_context.getBufferSize(); }
4177 
4178 	vk::VkImage					getImage					(void) const { return m_context.getImage(); }
4179 	deInt32						getImageWidth				(void) const { return m_context.getImageWidth(); }
4180 	deInt32						getImageHeight				(void) const { return m_context.getImageHeight(); }
4181 	vk::VkImageLayout			getImageLayout				(void) const { return m_context.getImageLayout(); }
4182 
4183 	deInt32						getTargetWidth				(void) const { return m_targetWidth; }
4184 	deInt32						getTargetHeight				(void) const { return m_targetHeight; }
4185 
4186 	vk::VkRenderPass			getRenderPass				(void) const { return m_renderPass; }
4187 
4188 private:
4189 	PrepareContext&				m_context;
4190 	const vk::VkRenderPass		m_renderPass;
4191 	const vk::VkFramebuffer		m_framebuffer;
4192 	const deInt32				m_targetWidth;
4193 	const deInt32				m_targetHeight;
4194 };
4195 
4196 class VerifyRenderPassContext
4197 {
4198 public:
4199 							VerifyRenderPassContext		(VerifyContext&			context,
4200 														 deInt32				targetWidth,
4201 														 deInt32				targetHeight)
4202 		: m_context			(context)
4203 		, m_referenceTarget	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), targetWidth, targetHeight)
4204 	{
4205 	}
4206 
4207 	const Context&			getContext			(void) const { return m_context.getContext(); }
4208 	TestLog&				getLog				(void) const { return m_context.getLog(); }
4209 	tcu::ResultCollector&	getResultCollector	(void) const { return m_context.getResultCollector(); }
4210 
4211 	TextureLevel&			getReferenceTarget	(void) { return m_referenceTarget; }
4212 
4213 	ReferenceMemory&		getReference		(void) { return m_context.getReference(); }
4214 	TextureLevel&			getReferenceImage	(void) { return m_context.getReferenceImage(); }
4215 
4216 private:
4217 	VerifyContext&	m_context;
4218 	TextureLevel	m_referenceTarget;
4219 };
4220 
4221 class RenderPassCommand
4222 {
4223 public:
4224 	virtual				~RenderPassCommand	(void) {}
4225 	virtual const char*	getName				(void) const = 0;
4226 
4227 	// Log things that are done during prepare
4228 	virtual void		logPrepare			(TestLog&, size_t) const {}
4229 	// Log submitted calls etc.
4230 	virtual void		logSubmit			(TestLog&, size_t) const {}
4231 
4232 	// Allocate vulkan resources and prepare for submit.
4233 	virtual void		prepare				(PrepareRenderPassContext&) {}
4234 
4235 	// Submit commands to command buffer.
4236 	virtual void		submit				(SubmitContext&) {}
4237 
4238 	// Verify results
4239 	virtual void		verify				(VerifyRenderPassContext&, size_t) {}
4240 };
4241 
4242 class SubmitRenderPass : public CmdCommand
4243 {
4244 public:
4245 				SubmitRenderPass	(const vector<RenderPassCommand*>& commands);
4246 				~SubmitRenderPass	(void);
4247 	const char*	getName				(void) const { return "SubmitRenderPass"; }
4248 
4249 	void		logPrepare			(TestLog&, size_t) const;
4250 	void		logSubmit			(TestLog&, size_t) const;
4251 
4252 	void		prepare				(PrepareContext&);
4253 	void		submit				(SubmitContext&);
4254 
4255 	void		verify				(VerifyContext&, size_t);
4256 
4257 private:
4258 	const deInt32					m_targetWidth;
4259 	const deInt32					m_targetHeight;
4260 	vk::Move<vk::VkRenderPass>		m_renderPass;
4261 	vk::Move<vk::VkDeviceMemory>	m_colorTargetMemory;
4262 	de::MovePtr<vk::Allocation>		m_colorTargetMemory2;
4263 	vk::Move<vk::VkImage>			m_colorTarget;
4264 	vk::Move<vk::VkImageView>		m_colorTargetView;
4265 	vk::Move<vk::VkFramebuffer>		m_framebuffer;
4266 	vector<RenderPassCommand*>		m_commands;
4267 };
4268 
4269 SubmitRenderPass::SubmitRenderPass (const vector<RenderPassCommand*>& commands)
4270 	: m_targetWidth		(256)
4271 	, m_targetHeight	(256)
4272 	, m_commands		(commands)
4273 {
4274 }
4275 
4276 SubmitRenderPass::~SubmitRenderPass()
4277 {
4278 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4279 		delete m_commands[cmdNdx];
4280 }
4281 
4282 void SubmitRenderPass::logPrepare (TestLog& log, size_t commandIndex) const
4283 {
4284 	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
4285 	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
4286 
4287 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4288 	{
4289 		RenderPassCommand& command = *m_commands[cmdNdx];
4290 		command.logPrepare(log, cmdNdx);
4291 	}
4292 }
4293 
4294 void SubmitRenderPass::logSubmit (TestLog& log, size_t commandIndex) const
4295 {
4296 	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
4297 	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
4298 
4299 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4300 	{
4301 		RenderPassCommand& command = *m_commands[cmdNdx];
4302 		command.logSubmit(log, cmdNdx);
4303 	}
4304 }
4305 
4306 void SubmitRenderPass::prepare (PrepareContext& context)
4307 {
4308 	const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
4309 	const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
4310 	const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
4311 	const vk::VkDevice						device			= context.getContext().getDevice();
4312 	const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
4313 
4314 	{
4315 		const vk::VkImageCreateInfo createInfo =
4316 		{
4317 			vk::VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
4318 			DE_NULL,
4319 			0u,
4320 
4321 			vk::VK_IMAGE_TYPE_2D,
4322 			vk::VK_FORMAT_R8G8B8A8_UNORM,
4323 			{ (deUint32)m_targetWidth, (deUint32)m_targetHeight, 1u },
4324 			1u,
4325 			1u,
4326 			vk::VK_SAMPLE_COUNT_1_BIT,
4327 			vk::VK_IMAGE_TILING_OPTIMAL,
4328 			vk::VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | vk::VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
4329 			vk::VK_SHARING_MODE_EXCLUSIVE,
4330 			(deUint32)queueFamilies.size(),
4331 			&queueFamilies[0],
4332 			vk::VK_IMAGE_LAYOUT_UNDEFINED
4333 		};
4334 
4335 		m_colorTarget = vk::createImage(vkd, device, &createInfo);
4336 	}
4337 
4338 	m_colorTargetMemory = bindImageMemory(vki, vkd, physicalDevice, device, *m_colorTarget, 0);
4339 
4340 	{
4341 		const vk::VkImageViewCreateInfo createInfo =
4342 		{
4343 			vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
4344 			DE_NULL,
4345 
4346 			0u,
4347 			*m_colorTarget,
4348 			vk::VK_IMAGE_VIEW_TYPE_2D,
4349 			vk::VK_FORMAT_R8G8B8A8_UNORM,
4350 			{
4351 				vk::VK_COMPONENT_SWIZZLE_R,
4352 				vk::VK_COMPONENT_SWIZZLE_G,
4353 				vk::VK_COMPONENT_SWIZZLE_B,
4354 				vk::VK_COMPONENT_SWIZZLE_A
4355 			},
4356 			{
4357 				vk::VK_IMAGE_ASPECT_COLOR_BIT,
4358 				0u,
4359 				1u,
4360 				0u,
4361 				1u
4362 			}
4363 		};
4364 
4365 		m_colorTargetView = vk::createImageView(vkd, device, &createInfo);
4366 	}
4367 
4368 	m_renderPass = vk::makeRenderPass(vkd, device, vk::VK_FORMAT_R8G8B8A8_UNORM, vk::VK_FORMAT_UNDEFINED, vk::VK_ATTACHMENT_LOAD_OP_CLEAR, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
4369 
4370 	{
4371 		const vk::VkImageView				imageViews[]	=
4372 		{
4373 			*m_colorTargetView
4374 		};
4375 		const vk::VkFramebufferCreateInfo	createInfo		=
4376 		{
4377 			vk::VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
4378 			DE_NULL,
4379 			0u,
4380 
4381 			*m_renderPass,
4382 			DE_LENGTH_OF_ARRAY(imageViews),
4383 			imageViews,
4384 			(deUint32)m_targetWidth,
4385 			(deUint32)m_targetHeight,
4386 			1u
4387 		};
4388 
4389 		m_framebuffer = vk::createFramebuffer(vkd, device, &createInfo);
4390 	}
4391 
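	// Let each contained render pass command allocate its resources against the shared render pass
	// and framebuffer.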
4392 	{
4393 		PrepareRenderPassContext renderpassContext (context, *m_renderPass, *m_framebuffer, m_targetWidth, m_targetHeight);
4394 
4395 		for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4396 		{
4397 			RenderPassCommand& command = *m_commands[cmdNdx];
4398 			command.prepare(renderpassContext);
4399 		}
4400 	}
4401 }
4402 
4403 void SubmitRenderPass::submit (SubmitContext& context)
4404 {
4405 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
4406 	const vk::VkCommandBuffer		commandBuffer	= context.getCommandBuffer();
4407 
4408 	beginRenderPass(vkd, commandBuffer, *m_renderPass, *m_framebuffer, vk::makeRect2D(0, 0, m_targetWidth, m_targetHeight), tcu::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
4409 
4410 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4411 	{
4412 		RenderPassCommand& command = *m_commands[cmdNdx];
4413 
4414 		command.submit(context);
4415 	}
4416 
4417 	endRenderPass(vkd, commandBuffer);
4418 }
4419 
4420 void SubmitRenderPass::verify (VerifyContext& context, size_t commandIndex)
4421 {
4422 	TestLog&					log				(context.getLog());
4423 	tcu::ResultCollector&		resultCollector	(context.getResultCollector());
4424 	const string				sectionName		(de::toString(commandIndex) + ":" + getName());
4425 	const tcu::ScopedLogSection	section			(log, sectionName, sectionName);
4426 	VerifyRenderPassContext		verifyContext	(context, m_targetWidth, m_targetHeight);
4427 
4428 	tcu::clear(verifyContext.getReferenceTarget().getAccess(), Vec4(0.0f, 0.0f, 0.0f, 1.0f));
4429 
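	// Build the expected render target by replaying each command's verify(), then read the color
	// target back below for comparison.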
4430 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4431 	{
4432 		RenderPassCommand& command = *m_commands[cmdNdx];
4433 		command.verify(verifyContext, cmdNdx);
4434 	}
4435 
4436 	{
4437 		const vk::InstanceInterface&			vki				= context.getContext().getInstanceInterface();
4438 		const vk::DeviceInterface&				vkd				= context.getContext().getDeviceInterface();
4439 		const vk::VkPhysicalDevice				physicalDevice	= context.getContext().getPhysicalDevice();
4440 		const vk::VkDevice						device			= context.getContext().getDevice();
4441 		const vk::VkQueue						queue			= context.getContext().getQueue();
4442 		const vk::VkCommandPool					commandPool		= context.getContext().getCommandPool();
4443 		const vk::Unique<vk::VkCommandBuffer>	commandBuffer	(createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_PRIMARY));
4444 		const vector<deUint32>&					queueFamilies	= context.getContext().getQueueFamilies();
4445 		const vk::Unique<vk::VkBuffer>			dstBuffer		(createBuffer(vkd, device, 4 * m_targetWidth * m_targetHeight, vk::VK_BUFFER_USAGE_TRANSFER_DST_BIT, vk::VK_SHARING_MODE_EXCLUSIVE, queueFamilies));
4446 		const vk::Unique<vk::VkDeviceMemory>	memory			(bindBufferMemory(vki, vkd, physicalDevice, device, *dstBuffer, vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT));
4447 		{
4448 			const vk::VkImageMemoryBarrier		imageBarrier	=
4449 			{
4450 				vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
4451 				DE_NULL,
4452 
4453 				vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
4454 				vk::VK_ACCESS_TRANSFER_READ_BIT,
4455 
4456 				vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4457 				vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4458 
4459 				VK_QUEUE_FAMILY_IGNORED,
4460 				VK_QUEUE_FAMILY_IGNORED,
4461 
4462 				*m_colorTarget,
4463 				{
4464 					vk::VK_IMAGE_ASPECT_COLOR_BIT,
4465 					0,	// Mip level
4466 					1,	// Mip level count
4467 					0,	// Layer
4468 					1	// Layer count
4469 				}
4470 			};
4471 			const vk::VkBufferMemoryBarrier bufferBarrier =
4472 			{
4473 				vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
4474 				DE_NULL,
4475 
4476 				vk::VK_ACCESS_TRANSFER_WRITE_BIT,
4477 				vk::VK_ACCESS_HOST_READ_BIT,
4478 
4479 				VK_QUEUE_FAMILY_IGNORED,
4480 				VK_QUEUE_FAMILY_IGNORED,
4481 				*dstBuffer,
4482 				0,
4483 				VK_WHOLE_SIZE
4484 			};
4485 			const vk::VkBufferImageCopy	region =
4486 			{
4487 				0,
4488 				0, 0,
4489 				{
4490 					vk::VK_IMAGE_ASPECT_COLOR_BIT,
4491 					0,	// mipLevel
4492 					0,	// arrayLayer
4493 					1	// layerCount
4494 				},
4495 				{ 0, 0, 0 },
4496 				{
4497 					(deUint32)m_targetWidth,
4498 					(deUint32)m_targetHeight,
4499 					1u
4500 				}
4501 			};
4502 
4503 			vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 0, (const vk::VkBufferMemoryBarrier*)DE_NULL, 1, &imageBarrier);
4504 			vkd.cmdCopyImageToBuffer(*commandBuffer, *m_colorTarget, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, *dstBuffer, 1, &region);
4505 			vkd.cmdPipelineBarrier(*commandBuffer, vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_PIPELINE_STAGE_HOST_BIT, (vk::VkDependencyFlags)0, 0, (const vk::VkMemoryBarrier*)DE_NULL, 1, &bufferBarrier, 0, (const vk::VkImageMemoryBarrier*)DE_NULL);
4506 		}
4507 
4508 		endCommandBuffer(vkd, *commandBuffer);
4509 		submitCommandsAndWait(vkd, device, queue, *commandBuffer);
4510 
4511 		{
4512 			void* const	ptr		= mapMemory(vkd, device, *memory, 4 * m_targetWidth * m_targetHeight);
4513 
4514 			vk::invalidateMappedMemoryRange(vkd, device, *memory, 0,  4 * m_targetWidth * m_targetHeight);
4515 
4516 			{
4517 				const deUint8* const			data		= (const deUint8*)ptr;
4518 				const ConstPixelBufferAccess	resAccess	(TextureFormat(TextureFormat::RGBA, TextureFormat::UNORM_INT8), m_targetWidth, m_targetHeight, 1, data);
4519 				const ConstPixelBufferAccess&	refAccess	(verifyContext.getReferenceTarget().getAccess());
4520 
4521 				if (!tcu::intThresholdCompare(context.getLog(), (de::toString(commandIndex) + ":" + getName()).c_str(), (de::toString(commandIndex) + ":" + getName()).c_str(), refAccess, resAccess, UVec4(0), tcu::COMPARE_LOG_ON_ERROR))
4522 					resultCollector.fail(de::toString(commandIndex) + ":" + getName() + " Image comparison failed");
4523 			}
4524 
4525 			vkd.unmapMemory(device, *memory);
4526 		}
4527 	}
4528 }
4529 
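// Records the child commands into a secondary command buffer during prepare() and
// executes that buffer from the primary command buffer at submit time.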
4530 class ExecuteSecondaryCommandBuffer : public CmdCommand
4531 {
4532 public:
4533 				ExecuteSecondaryCommandBuffer	(const vector<CmdCommand*>& commands);
4534 				~ExecuteSecondaryCommandBuffer	(void);
4535 	const char*	getName							(void) const { return "ExecuteSecondaryCommandBuffer"; }
4536 
4537 	void		logPrepare						(TestLog&, size_t) const;
4538 	void		logSubmit						(TestLog&, size_t) const;
4539 
4540 	void		prepare							(PrepareContext&);
4541 	void		submit							(SubmitContext&);
4542 
4543 	void		verify							(VerifyContext&, size_t);
4544 
4545 private:
4546 	vk::Move<vk::VkCommandBuffer>				m_commandBuffer;
4547 	vk::Move<vk::VkDeviceMemory>				m_colorTargetMemory;
4548 	de::MovePtr<vk::Allocation>					m_colorTargetMemory2;
4549 	vk::Move<vk::VkImage>						m_colorTarget;
4550 	vk::Move<vk::VkImageView>					m_colorTargetView;
4551 	vk::Move<vk::VkFramebuffer>					m_framebuffer;
4552 	vector<CmdCommand*>							m_commands;
4553 };
4554 
4555 ExecuteSecondaryCommandBuffer::ExecuteSecondaryCommandBuffer(const vector<CmdCommand*>& commands)
4556 	: m_commands		(commands)
4557 {
4558 }
4559 
4560 ExecuteSecondaryCommandBuffer::~ExecuteSecondaryCommandBuffer (void)
4561 {
4562 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4563 		delete m_commands[cmdNdx];
4564 }
4565 
4566 void ExecuteSecondaryCommandBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4567 {
4568 	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
4569 	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
4570 
4571 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4572 	{
4573 		CmdCommand& command = *m_commands[cmdNdx];
4574 		command.logPrepare(log, cmdNdx);
4575 	}
4576 }
4577 
4578 void ExecuteSecondaryCommandBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4579 {
4580 	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
4581 	const tcu::ScopedLogSection	section		(log, sectionName, sectionName);
4582 
4583 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4584 	{
4585 		CmdCommand& command = *m_commands[cmdNdx];
4586 		command.logSubmit(log, cmdNdx);
4587 	}
4588 }
4589 
4590 void ExecuteSecondaryCommandBuffer::prepare (PrepareContext& context)
4591 {
4592 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
4593 	const vk::VkDevice				device			= context.getContext().getDevice();
4594 	const vk::VkCommandPool			commandPool		= context.getContext().getCommandPool();
4595 
4596 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4597 	{
4598 		CmdCommand& command = *m_commands[cmdNdx];
4599 
4600 		command.prepare(context);
4601 	}
4602 
4603 	m_commandBuffer = createBeginCommandBuffer(vkd, device, commandPool, vk::VK_COMMAND_BUFFER_LEVEL_SECONDARY);
4604 	{
4605 		SubmitContext submitContext (context, *m_commandBuffer);
4606 
4607 		for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4608 		{
4609 			CmdCommand& command = *m_commands[cmdNdx];
4610 
4611 			command.submit(submitContext);
4612 		}
4613 
4614 		endCommandBuffer(vkd, *m_commandBuffer);
4615 	}
4616 }
4617 
4618 void ExecuteSecondaryCommandBuffer::submit (SubmitContext& context)
4619 {
4620 	const vk::DeviceInterface&		vkd				= context.getContext().getDeviceInterface();
4621 	const vk::VkCommandBuffer		commandBuffer	= context.getCommandBuffer();
4622 
4623 
4624 	{
4625 		vkd.cmdExecuteCommands(commandBuffer, 1, &m_commandBuffer.get());
4626 	}
4627 }
4628 
4629 void ExecuteSecondaryCommandBuffer::verify (VerifyContext& context, size_t commandIndex)
4630 {
4631 	const string				sectionName	(de::toString(commandIndex) + ":" + getName());
4632 	const tcu::ScopedLogSection	section		(context.getLog(), sectionName, sectionName);
4633 
4634 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
4635 		m_commands[cmdNdx]->verify(context, cmdNdx);
4636 }
4637 
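// Graphics pipeline together with the descriptor set layout and pipeline layout it was built with.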
4638 struct PipelineResources
4639 {
4640 	vk::Move<vk::VkPipeline>			pipeline;
4641 	vk::Move<vk::VkDescriptorSetLayout>	descriptorSetLayout;
4642 	vk::Move<vk::VkPipelineLayout>		pipelineLayout;
4643 };
4644 
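// Creates a graphics pipeline (plus descriptor set layout and pipeline layout) for the given
// shaders, vertex input state and descriptor bindings, rendering into the given render pass.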
4645 void createPipelineWithResources (const vk::DeviceInterface&							vkd,
4646 								  const vk::VkDevice									device,
4647 								  const vk::VkRenderPass								renderPass,
4648 								  const deUint32										subpass,
4649 								  const vk::VkShaderModule&								vertexShaderModule,
4650 								  const vk::VkShaderModule&								fragmentShaderModule,
4651 								  const deUint32										viewPortWidth,
4652 								  const deUint32										viewPortHeight,
4653 								  const vector<vk::VkVertexInputBindingDescription>&	vertexBindingDescriptions,
4654 								  const vector<vk::VkVertexInputAttributeDescription>&	vertexAttributeDescriptions,
4655 								  const vector<vk::VkDescriptorSetLayoutBinding>&		bindings,
4656 								  const vk::VkPrimitiveTopology							topology,
4657 								  deUint32												pushConstantRangeCount,
4658 								  const vk::VkPushConstantRange*						pushConstantRanges,
4659 								  PipelineResources&									resources)
4660 {
4661 	if (!bindings.empty())
4662 	{
4663 		const vk::VkDescriptorSetLayoutCreateInfo createInfo =
4664 		{
4665 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
4666 			DE_NULL,
4667 
4668 			0u,
4669 			(deUint32)bindings.size(),
4670 			bindings.empty() ? DE_NULL : &bindings[0]
4671 		};
4672 
4673 		resources.descriptorSetLayout = vk::createDescriptorSetLayout(vkd, device, &createInfo);
4674 	}
4675 
4676 	{
4677 		const vk::VkDescriptorSetLayout			descriptorSetLayout_	= *resources.descriptorSetLayout;
4678 		const vk::VkPipelineLayoutCreateInfo	createInfo				=
4679 		{
4680 			vk::VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
4681 			DE_NULL,
4682 			0,
4683 
4684 			resources.descriptorSetLayout ? 1u : 0u,
4685 			resources.descriptorSetLayout ? &descriptorSetLayout_ : DE_NULL,
4686 
4687 			pushConstantRangeCount,
4688 			pushConstantRanges
4689 		};
4690 
4691 		resources.pipelineLayout = vk::createPipelineLayout(vkd, device, &createInfo);
4692 	}
4693 
4694 	{
4695 		const std::vector<vk::VkViewport>				viewports			(1, vk::makeViewport(0.0f, 0.0f, (float)viewPortWidth, (float)viewPortHeight, 0.0f, 1.0f));
4696 		const std::vector<vk::VkRect2D>					scissors			(1, vk::makeRect2D(0, 0, viewPortWidth, viewPortHeight));
4697 
4698 		const vk::VkPipelineVertexInputStateCreateInfo	vertexInputState	=
4699 		{
4700 			vk::VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
4701 			DE_NULL,
4702 			0u,
4703 
4704 			(deUint32)vertexBindingDescriptions.size(),
4705 			vertexBindingDescriptions.empty() ? DE_NULL : &vertexBindingDescriptions[0],
4706 
4707 			(deUint32)vertexAttributeDescriptions.size(),
4708 			vertexAttributeDescriptions.empty() ? DE_NULL : &vertexAttributeDescriptions[0]
4709 		};
4710 
4711 		resources.pipeline = vk::makeGraphicsPipeline(vkd,							// const DeviceInterface&                        vk
4712 													  device,						// const VkDevice                                device
4713 													  *resources.pipelineLayout,	// const VkPipelineLayout                        pipelineLayout
4714 													  vertexShaderModule,			// const VkShaderModule                          vertexShaderModule
4715 													  DE_NULL,						// const VkShaderModule                          tessellationControlModule
4716 													  DE_NULL,						// const VkShaderModule                          tessellationEvalModule
4717 													  DE_NULL,						// const VkShaderModule                          geometryShaderModule
4718 													  fragmentShaderModule,			// const VkShaderModule                          fragmentShaderModule
4719 													  renderPass,					// const VkRenderPass                            renderPass
4720 													  viewports,					// const std::vector<VkViewport>&                viewports
4721 													  scissors,						// const std::vector<VkRect2D>&                  scissors
4722 													  topology,						// const VkPrimitiveTopology                     topology
4723 													  subpass,						// const deUint32                                subpass
4724 													  0u,							// const deUint32                                patchControlPoints
4725 													  &vertexInputState);			// const VkPipelineVertexInputStateCreateInfo*   vertexInputStateCreateInfo
4726 	}
4727 }
4728 
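// Binds the test buffer as a 16-bit index buffer and draws one point per index.
// Verification treats each byte pair of the buffer as an (x, y) coordinate and
// expects a white pixel there in the reference image.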
4729 class RenderIndexBuffer : public RenderPassCommand
4730 {
4731 public:
4732 				RenderIndexBuffer	(void) {}
4733 				~RenderIndexBuffer	(void) {}
4734 
4735 	const char*	getName				(void) const { return "RenderIndexBuffer"; }
4736 	void		logPrepare			(TestLog&, size_t) const;
4737 	void		logSubmit			(TestLog&, size_t) const;
4738 	void		prepare				(PrepareRenderPassContext&);
4739 	void		submit				(SubmitContext& context);
4740 	void		verify				(VerifyRenderPassContext&, size_t);
4741 
4742 private:
4743 	PipelineResources				m_resources;
4744 	vk::VkDeviceSize				m_bufferSize;
4745 };
4746 
4747 void RenderIndexBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4748 {
4749 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as index buffer." << TestLog::EndMessage;
4750 }
4751 
4752 void RenderIndexBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4753 {
4754 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as index buffer." << TestLog::EndMessage;
4755 }
4756 
4757 void RenderIndexBuffer::prepare (PrepareRenderPassContext& context)
4758 {
4759 	const vk::DeviceInterface&				vkd						= context.getContext().getDeviceInterface();
4760 	const vk::VkDevice						device					= context.getContext().getDevice();
4761 	const vk::VkRenderPass					renderPass				= context.getRenderPass();
4762 	const deUint32							subpass					= 0;
4763 	const vk::Unique<vk::VkShaderModule>	vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("index-buffer.vert"), 0));
4764 	const vk::Unique<vk::VkShaderModule>	fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
4765 
4766 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
4767 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), vector<vk::VkDescriptorSetLayoutBinding>(), vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
4768 	m_bufferSize = context.getBufferSize();
4769 }
4770 
4771 void RenderIndexBuffer::submit (SubmitContext& context)
4772 {
4773 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
4774 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
4775 
4776 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
4777 	vkd.cmdBindIndexBuffer(commandBuffer, context.getBuffer(), 0, vk::VK_INDEX_TYPE_UINT16);
4778 	vkd.cmdDrawIndexed(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0, 0);
4779 }
4780 
4781 void RenderIndexBuffer::verify (VerifyRenderPassContext& context, size_t)
4782 {
4783 	for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
4784 	{
4785 		const deUint8 x  = context.getReference().get(pos * 2);
4786 		const deUint8 y  = context.getReference().get((pos * 2) + 1);
4787 
4788 		context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
4789 	}
4790 }
4791 
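// Binds the test buffer as a vertex buffer of R8G8_UNORM positions and draws one point per
// two bytes; verification marks the corresponding (x, y) byte pairs white in the reference image.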
4792 class RenderVertexBuffer : public RenderPassCommand
4793 {
4794 public:
4795 				RenderVertexBuffer	(void) {}
4796 				~RenderVertexBuffer	(void) {}
4797 
4798 	const char*	getName				(void) const { return "RenderVertexBuffer"; }
4799 	void		logPrepare			(TestLog&, size_t) const;
4800 	void		logSubmit			(TestLog&, size_t) const;
4801 	void		prepare				(PrepareRenderPassContext&);
4802 	void		submit				(SubmitContext& context);
4803 	void		verify				(VerifyRenderPassContext&, size_t);
4804 
4805 private:
4806 	PipelineResources	m_resources;
4807 	vk::VkDeviceSize	m_bufferSize;
4808 };
4809 
4810 void RenderVertexBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4811 {
4812 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as vertex buffer." << TestLog::EndMessage;
4813 }
4814 
4815 void RenderVertexBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4816 {
4817 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as vertex buffer." << TestLog::EndMessage;
4818 }
4819 
4820 void RenderVertexBuffer::prepare (PrepareRenderPassContext& context)
4821 {
4822 	const vk::DeviceInterface&						vkd						= context.getContext().getDeviceInterface();
4823 	const vk::VkDevice								device					= context.getContext().getDevice();
4824 	const vk::VkRenderPass							renderPass				= context.getRenderPass();
4825 	const deUint32									subpass					= 0;
4826 	const vk::Unique<vk::VkShaderModule>			vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("vertex-buffer.vert"), 0));
4827 	const vk::Unique<vk::VkShaderModule>			fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
4828 
4829 	vector<vk::VkVertexInputAttributeDescription>	vertexAttributeDescriptions;
4830 	vector<vk::VkVertexInputBindingDescription>		vertexBindingDescriptions;
4831 
4832 	{
4833 		const vk::VkVertexInputBindingDescription vertexBindingDescription =
4834 			{
4835 				0,
4836 				2,
4837 				vk::VK_VERTEX_INPUT_RATE_VERTEX
4838 			};
4839 
4840 		vertexBindingDescriptions.push_back(vertexBindingDescription);
4841 	}
4842 	{
4843 		const vk::VkVertexInputAttributeDescription vertexAttributeDescription =
4844 		{
4845 			0,
4846 			0,
4847 			vk::VK_FORMAT_R8G8_UNORM,
4848 			0
4849 		};
4850 
4851 		vertexAttributeDescriptions.push_back(vertexAttributeDescription);
4852 	}
4853 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
4854 								vertexBindingDescriptions, vertexAttributeDescriptions, vector<vk::VkDescriptorSetLayoutBinding>(), vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
4855 
4856 	m_bufferSize = context.getBufferSize();
4857 }
4858 
4859 void RenderVertexBuffer::submit (SubmitContext& context)
4860 {
4861 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
4862 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
4863 	const vk::VkDeviceSize		offset			= 0;
4864 	const vk::VkBuffer			buffer			= context.getBuffer();
4865 
4866 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
4867 	vkd.cmdBindVertexBuffers(commandBuffer, 0, 1, &buffer, &offset);
4868 	vkd.cmdDraw(commandBuffer, (deUint32)(context.getBufferSize() / 2), 1, 0, 0);
4869 }
4870 
4871 void RenderVertexBuffer::verify (VerifyRenderPassContext& context, size_t)
4872 {
4873 	for (size_t pos = 0; pos < (size_t)m_bufferSize / 2; pos++)
4874 	{
4875 		const deUint8 x  = context.getReference().get(pos * 2);
4876 		const deUint8 y  = context.getReference().get((pos * 2) + 1);
4877 
4878 		context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
4879 	}
4880 }
4881 
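// Reads the test buffer through uniform buffer descriptors. The buffer is split into
// MAX_UNIFORM_BUFFER_SIZE chunks, each bound to its own descriptor set and drawn separately.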
4882 class RenderVertexUniformBuffer : public RenderPassCommand
4883 {
4884 public:
4885 									RenderVertexUniformBuffer	(void) {}
4886 									~RenderVertexUniformBuffer	(void);
4887 
4888 	const char*						getName						(void) const { return "RenderVertexUniformBuffer"; }
4889 	void							logPrepare					(TestLog&, size_t) const;
4890 	void							logSubmit					(TestLog&, size_t) const;
4891 	void							prepare						(PrepareRenderPassContext&);
4892 	void							submit						(SubmitContext& context);
4893 	void							verify						(VerifyRenderPassContext&, size_t);
4894 
4895 private:
4896 	PipelineResources				m_resources;
4897 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
4898 	vector<vk::VkDescriptorSet>		m_descriptorSets;
4899 
4900 	vk::VkDeviceSize				m_bufferSize;
4901 };
4902 
4903 RenderVertexUniformBuffer::~RenderVertexUniformBuffer (void)
4904 {
4905 }
4906 
4907 void RenderVertexUniformBuffer::logPrepare (TestLog& log, size_t commandIndex) const
4908 {
4909 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as uniform buffer." << TestLog::EndMessage;
4910 }
4911 
4912 void RenderVertexUniformBuffer::logSubmit (TestLog& log, size_t commandIndex) const
4913 {
4914 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as uniform buffer." << TestLog::EndMessage;
4915 }
4916 
4917 void RenderVertexUniformBuffer::prepare (PrepareRenderPassContext& context)
4918 {
4919 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
4920 	const vk::VkDevice							device					= context.getContext().getDevice();
4921 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
4922 	const deUint32								subpass					= 0;
4923 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("uniform-buffer.vert"), 0));
4924 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
4925 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
4926 
4927 	m_bufferSize = context.getBufferSize();
4928 
4929 	{
4930 		const vk::VkDescriptorSetLayoutBinding binding =
4931 		{
4932 			0u,
4933 			vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
4934 			1,
4935 			vk::VK_SHADER_STAGE_VERTEX_BIT,
4936 			DE_NULL
4937 		};
4938 
4939 		bindings.push_back(binding);
4940 	}
4941 
4942 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
4943 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
4944 
4945 	{
4946 		const deUint32							descriptorCount	= (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE));
4947 		const vk::VkDescriptorPoolSize			poolSizes		=
4948 		{
4949 			vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
4950 			descriptorCount
4951 		};
4952 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
4953 		{
4954 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
4955 			DE_NULL,
4956 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
4957 
4958 			descriptorCount,
4959 			1u,
4960 			&poolSizes,
4961 		};
4962 
4963 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
4964 		m_descriptorSets.resize(descriptorCount);
4965 	}
4966 
4967 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
4968 	{
4969 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
4970 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
4971 		{
4972 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
4973 			DE_NULL,
4974 
4975 			*m_descriptorPool,
4976 			1,
4977 			&layout
4978 		};
4979 
4980 		m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
4981 
4982 		{
4983 			const vk::VkDescriptorBufferInfo		bufferInfo	=
4984 			{
4985 				context.getBuffer(),
4986 				(vk::VkDeviceSize)(descriptorSetNdx * (size_t)MAX_UNIFORM_BUFFER_SIZE),
4987 				m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
4988 					? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
4989 					: (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
4990 			};
4991 			const vk::VkWriteDescriptorSet			write		=
4992 			{
4993 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
4994 				DE_NULL,
4995 				m_descriptorSets[descriptorSetNdx],
4996 				0u,
4997 				0u,
4998 				1u,
4999 				vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
5000 				DE_NULL,
5001 				&bufferInfo,
5002 				DE_NULL,
5003 			};
5004 
5005 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
5006 		}
5007 	}
5008 }
5009 
5010 void RenderVertexUniformBuffer::submit (SubmitContext& context)
5011 {
5012 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
5013 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
5014 
5015 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5016 
5017 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5018 	{
5019 		const size_t	size	= (size_t)(m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5020 								? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5021 								: (size_t)MAX_UNIFORM_BUFFER_SIZE);
5022 		const deUint32	count	= (deUint32)(size / 2);
5023 
5024 		vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5025 		vkd.cmdDraw(commandBuffer, count, 1, 0, 0);
5026 	}
5027 }
5028 
5029 void RenderVertexUniformBuffer::verify (VerifyRenderPassContext& context, size_t)
5030 {
5031 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5032 	{
5033 		const size_t	offset	= descriptorSetNdx * MAX_UNIFORM_BUFFER_SIZE;
5034 		const size_t	size	= (size_t)(m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5035 								? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
5036 								: (size_t)MAX_UNIFORM_BUFFER_SIZE);
5037 		const size_t	count	= size / 2;
5038 
5039 		for (size_t pos = 0; pos < count; pos++)
5040 		{
5041 			const deUint8 x  = context.getReference().get(offset + pos * 2);
5042 			const deUint8 y  = context.getReference().get(offset + (pos * 2) + 1);
5043 
5044 			context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
5045 		}
5046 	}
5047 }
5048 
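// Reads the test buffer through R16_UINT uniform texel buffer views, split into chunks of
// at most maxTexelBufferElements texels, with one descriptor set and draw call per chunk.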
5049 class RenderVertexUniformTexelBuffer : public RenderPassCommand
5050 {
5051 public:
5052 				RenderVertexUniformTexelBuffer	(void) {}
5053 				~RenderVertexUniformTexelBuffer	(void);
5054 
5055 	const char*	getName							(void) const { return "RenderVertexUniformTexelBuffer"; }
5056 	void		logPrepare						(TestLog&, size_t) const;
5057 	void		logSubmit						(TestLog&, size_t) const;
5058 	void		prepare							(PrepareRenderPassContext&);
5059 	void		submit							(SubmitContext& context);
5060 	void		verify							(VerifyRenderPassContext&, size_t);
5061 
5062 private:
5063 	PipelineResources				m_resources;
5064 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
5065 	vector<vk::VkDescriptorSet>		m_descriptorSets;
5066 	vector<vk::VkBufferView>		m_bufferViews;
5067 
5068 	const vk::DeviceInterface*		m_vkd;
5069 	vk::VkDevice					m_device;
5070 	vk::VkDeviceSize				m_bufferSize;
5071 	deUint32						m_maxUniformTexelCount;
5072 };
5073 
5074 RenderVertexUniformTexelBuffer::~RenderVertexUniformTexelBuffer (void)
5075 {
5076 	for (size_t bufferViewNdx = 0; bufferViewNdx < m_bufferViews.size(); bufferViewNdx++)
5077 	{
5078 		if (!!m_bufferViews[bufferViewNdx])
5079 		{
5080 			m_vkd->destroyBufferView(m_device, m_bufferViews[bufferViewNdx], DE_NULL);
5081 			m_bufferViews[bufferViewNdx] = (vk::VkBufferView)0;
5082 		}
5083 	}
5084 }
5085 
5086 void RenderVertexUniformTexelBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5087 {
5088 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as uniform texel buffer." << TestLog::EndMessage;
5089 }
5090 
5091 void RenderVertexUniformTexelBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5092 {
5093 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as uniform texel buffer." << TestLog::EndMessage;
5094 }
5095 
5096 void RenderVertexUniformTexelBuffer::prepare (PrepareRenderPassContext& context)
5097 {
5098 	const vk::InstanceInterface&				vki						= context.getContext().getInstanceInterface();
5099 	const vk::VkPhysicalDevice					physicalDevice			= context.getContext().getPhysicalDevice();
5100 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
5101 	const vk::VkDevice							device					= context.getContext().getDevice();
5102 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
5103 	const deUint32								subpass					= 0;
5104 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("uniform-texel-buffer.vert"), 0));
5105 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5106 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
5107 
5108 	m_device				= device;
5109 	m_vkd					= &vkd;
5110 	m_bufferSize			= context.getBufferSize();
5111 	m_maxUniformTexelCount	= vk::getPhysicalDeviceProperties(vki, physicalDevice).limits.maxTexelBufferElements;
5112 
5113 	{
5114 		const vk::VkDescriptorSetLayoutBinding binding =
5115 		{
5116 			0u,
5117 			vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5118 			1,
5119 			vk::VK_SHADER_STAGE_VERTEX_BIT,
5120 			DE_NULL
5121 		};
5122 
5123 		bindings.push_back(binding);
5124 	}
5125 
5126 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5127 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
5128 
5129 	{
5130 		const deUint32							descriptorCount	= (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)m_maxUniformTexelCount * 2));
5131 		const vk::VkDescriptorPoolSize			poolSizes		=
5132 		{
5133 			vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5134 			descriptorCount
5135 		};
5136 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
5137 		{
5138 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5139 			DE_NULL,
5140 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5141 
5142 			descriptorCount,
5143 			1u,
5144 			&poolSizes,
5145 		};
5146 
5147 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5148 		m_descriptorSets.resize(descriptorCount, (vk::VkDescriptorSet)0);
5149 		m_bufferViews.resize(descriptorCount, (vk::VkBufferView)0);
5150 	}
5151 
5152 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5153 	{
5154 		const deUint32							count			= (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 2
5155 																? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 2
5156 																: m_maxUniformTexelCount * 2) / 2;
5157 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
5158 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
5159 		{
5160 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5161 			DE_NULL,
5162 
5163 			*m_descriptorPool,
5164 			1,
5165 			&layout
5166 		};
5167 
5168 		m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
5169 
5170 		{
5171 			const vk::VkBufferViewCreateInfo createInfo =
5172 			{
5173 				vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
5174 				DE_NULL,
5175 				0u,
5176 
5177 				context.getBuffer(),
5178 				vk::VK_FORMAT_R16_UINT,
5179 				descriptorSetNdx * m_maxUniformTexelCount * 2,
5180 				count * 2
5181 			};
5182 
5183 			VK_CHECK(vkd.createBufferView(device, &createInfo, DE_NULL, &m_bufferViews[descriptorSetNdx]));
5184 		}
5185 
5186 		{
5187 			const vk::VkWriteDescriptorSet			write		=
5188 			{
5189 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5190 				DE_NULL,
5191 				m_descriptorSets[descriptorSetNdx],
5192 				0u,
5193 				0u,
5194 				1u,
5195 				vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
5196 				DE_NULL,
5197 				DE_NULL,
5198 				&m_bufferViews[descriptorSetNdx]
5199 			};
5200 
5201 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
5202 		}
5203 	}
5204 }
5205 
5206 void RenderVertexUniformTexelBuffer::submit (SubmitContext& context)
5207 {
5208 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
5209 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
5210 
5211 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5212 
5213 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5214 	{
5215 		const deUint32 count	= (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 2
5216 								? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 2
5217 								: m_maxUniformTexelCount * 2) / 2;
5218 
5219 		vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5220 		vkd.cmdDraw(commandBuffer, count, 1, 0, 0);
5221 	}
5222 }
5223 
5224 void RenderVertexUniformTexelBuffer::verify (VerifyRenderPassContext& context, size_t)
5225 {
5226 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5227 	{
5228 		const size_t	offset	= descriptorSetNdx * m_maxUniformTexelCount * 2;
5229 		const deUint32	count	= (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 2
5230 								? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 2
5231 								: m_maxUniformTexelCount * 2) / 2;
5232 
5233 		for (size_t pos = 0; pos < (size_t)count; pos++)
5234 		{
5235 			const deUint8 x  = context.getReference().get(offset + pos * 2);
5236 			const deUint8 y  = context.getReference().get(offset + (pos * 2) + 1);
5237 
5238 			context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
5239 		}
5240 	}
5241 }
5242 
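// Reads the test buffer through storage buffer descriptors, split into
// MAX_STORAGE_BUFFER_SIZE chunks with one descriptor set and draw call per chunk.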
5243 class RenderVertexStorageBuffer : public RenderPassCommand
5244 {
5245 public:
5246 				RenderVertexStorageBuffer	(void) {}
5247 				~RenderVertexStorageBuffer	(void);
5248 
5249 	const char*	getName						(void) const { return "RenderVertexStorageBuffer"; }
5250 	void		logPrepare					(TestLog&, size_t) const;
5251 	void		logSubmit					(TestLog&, size_t) const;
5252 	void		prepare						(PrepareRenderPassContext&);
5253 	void		submit						(SubmitContext& context);
5254 	void		verify						(VerifyRenderPassContext&, size_t);
5255 
5256 private:
5257 	PipelineResources				m_resources;
5258 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
5259 	vector<vk::VkDescriptorSet>		m_descriptorSets;
5260 
5261 	vk::VkDeviceSize				m_bufferSize;
5262 };
5263 
5264 RenderVertexStorageBuffer::~RenderVertexStorageBuffer (void)
5265 {
5266 }
5267 
5268 void RenderVertexStorageBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5269 {
5270 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as storage buffer." << TestLog::EndMessage;
5271 }
5272 
5273 void RenderVertexStorageBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5274 {
5275 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as storage buffer." << TestLog::EndMessage;
5276 }
5277 
5278 void RenderVertexStorageBuffer::prepare (PrepareRenderPassContext& context)
5279 {
5280 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
5281 	const vk::VkDevice							device					= context.getContext().getDevice();
5282 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
5283 	const deUint32								subpass					= 0;
5284 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-buffer.vert"), 0));
5285 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5286 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
5287 
5288 	m_bufferSize = context.getBufferSize();
5289 
5290 	{
5291 		const vk::VkDescriptorSetLayoutBinding binding =
5292 		{
5293 			0u,
5294 			vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5295 			1,
5296 			vk::VK_SHADER_STAGE_VERTEX_BIT,
5297 			DE_NULL
5298 		};
5299 
5300 		bindings.push_back(binding);
5301 	}
5302 
5303 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5304 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
5305 
5306 	{
5307 		const deUint32							descriptorCount	= (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)MAX_STORAGE_BUFFER_SIZE));
5308 		const vk::VkDescriptorPoolSize			poolSizes		=
5309 		{
5310 			vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5311 			descriptorCount
5312 		};
5313 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
5314 		{
5315 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5316 			DE_NULL,
5317 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5318 
5319 			descriptorCount,
5320 			1u,
5321 			&poolSizes,
5322 		};
5323 
5324 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5325 		m_descriptorSets.resize(descriptorCount);
5326 	}
5327 
5328 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5329 	{
5330 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
5331 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
5332 		{
5333 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5334 			DE_NULL,
5335 
5336 			*m_descriptorPool,
5337 			1,
5338 			&layout
5339 		};
5340 
5341 		m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
5342 
5343 		{
5344 			const vk::VkDescriptorBufferInfo		bufferInfo	=
5345 			{
5346 				context.getBuffer(),
5347 				descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE,
5348 				de::min(m_bufferSize - descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE,  (vk::VkDeviceSize)MAX_STORAGE_BUFFER_SIZE)
5349 			};
5350 			const vk::VkWriteDescriptorSet			write		=
5351 			{
5352 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5353 				DE_NULL,
5354 				m_descriptorSets[descriptorSetNdx],
5355 				0u,
5356 				0u,
5357 				1u,
5358 				vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
5359 				DE_NULL,
5360 				&bufferInfo,
5361 				DE_NULL,
5362 			};
5363 
5364 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
5365 		}
5366 	}
5367 }
5368 
5369 void RenderVertexStorageBuffer::submit (SubmitContext& context)
5370 {
5371 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
5372 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
5373 
5374 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5375 
5376 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5377 	{
5378 		const size_t size	= m_bufferSize < (descriptorSetNdx + 1) * MAX_STORAGE_BUFFER_SIZE
5379 							? (size_t)(m_bufferSize - descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE)
5380 							: (size_t)(MAX_STORAGE_BUFFER_SIZE);
5381 
5382 		vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5383 		vkd.cmdDraw(commandBuffer, (deUint32)(size / 2), 1, 0, 0);
5384 	}
5385 }
5386 
5387 void RenderVertexStorageBuffer::verify (VerifyRenderPassContext& context, size_t)
5388 {
5389 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5390 	{
5391 		const size_t offset	= descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE;
5392 		const size_t size	= m_bufferSize < (descriptorSetNdx + 1) * MAX_STORAGE_BUFFER_SIZE
5393 							? (size_t)(m_bufferSize - descriptorSetNdx * MAX_STORAGE_BUFFER_SIZE)
5394 							: (size_t)(MAX_STORAGE_BUFFER_SIZE);
5395 
5396 		for (size_t pos = 0; pos < size / 2; pos++)
5397 		{
5398 			const deUint8 x  = context.getReference().get(offset + pos * 2);
5399 			const deUint8 y  = context.getReference().get(offset + (pos * 2) + 1);
5400 
5401 			context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
5402 		}
5403 	}
5404 }
5405 
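// Reads the test buffer through R32_UINT storage texel buffer views, split into chunks of
// at most maxTexelBufferElements texels; each 32-bit texel supplies two (x, y) byte pairs.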
5406 class RenderVertexStorageTexelBuffer : public RenderPassCommand
5407 {
5408 public:
5409 				RenderVertexStorageTexelBuffer	(void) {}
5410 				~RenderVertexStorageTexelBuffer	(void);
5411 
5412 	const char*	getName							(void) const { return "RenderVertexStorageTexelBuffer"; }
5413 	void		logPrepare						(TestLog&, size_t) const;
5414 	void		logSubmit						(TestLog&, size_t) const;
5415 	void		prepare							(PrepareRenderPassContext&);
5416 	void		submit							(SubmitContext& context);
5417 	void		verify							(VerifyRenderPassContext&, size_t);
5418 
5419 private:
5420 	PipelineResources				m_resources;
5421 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
5422 	vector<vk::VkDescriptorSet>		m_descriptorSets;
5423 	vector<vk::VkBufferView>		m_bufferViews;
5424 
5425 	const vk::DeviceInterface*		m_vkd;
5426 	vk::VkDevice					m_device;
5427 	vk::VkDeviceSize				m_bufferSize;
5428 	deUint32						m_maxStorageTexelCount;
5429 };
5430 
5431 RenderVertexStorageTexelBuffer::~RenderVertexStorageTexelBuffer (void)
5432 {
5433 	for (size_t bufferViewNdx = 0; bufferViewNdx < m_bufferViews.size(); bufferViewNdx++)
5434 	{
5435 		if (!!m_bufferViews[bufferViewNdx])
5436 		{
5437 			m_vkd->destroyBufferView(m_device, m_bufferViews[bufferViewNdx], DE_NULL);
5438 			m_bufferViews[bufferViewNdx] = (vk::VkBufferView)0;
5439 		}
5440 	}
5441 }
5442 
5443 void RenderVertexStorageTexelBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5444 {
5445 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as storage texel buffer." << TestLog::EndMessage;
5446 }
5447 
5448 void RenderVertexStorageTexelBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5449 {
5450 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as storage texel buffer." << TestLog::EndMessage;
5451 }
5452 
5453 void RenderVertexStorageTexelBuffer::prepare (PrepareRenderPassContext& context)
5454 {
5455 	const vk::InstanceInterface&				vki						= context.getContext().getInstanceInterface();
5456 	const vk::VkPhysicalDevice					physicalDevice			= context.getContext().getPhysicalDevice();
5457 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
5458 	const vk::VkDevice							device					= context.getContext().getDevice();
5459 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
5460 	const deUint32								subpass					= 0;
5461 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-texel-buffer.vert"), 0));
5462 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5463 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
5464 
5465 	m_device				= device;
5466 	m_vkd					= &vkd;
5467 	m_bufferSize			= context.getBufferSize();
5468 	m_maxStorageTexelCount	= vk::getPhysicalDeviceProperties(vki, physicalDevice).limits.maxTexelBufferElements;
5469 
5470 	{
5471 		const vk::VkDescriptorSetLayoutBinding binding =
5472 		{
5473 			0u,
5474 			vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
5475 			1,
5476 			vk::VK_SHADER_STAGE_VERTEX_BIT,
5477 			DE_NULL
5478 		};
5479 
5480 		bindings.push_back(binding);
5481 	}
5482 
5483 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5484 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
5485 
5486 	{
5487 		const deUint32							descriptorCount	= (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)m_maxStorageTexelCount * 4));
5488 		const vk::VkDescriptorPoolSize			poolSizes		=
5489 		{
5490 			vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
5491 			descriptorCount
5492 		};
5493 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
5494 		{
5495 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5496 			DE_NULL,
5497 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5498 
5499 			descriptorCount,
5500 			1u,
5501 			&poolSizes,
5502 		};
5503 
5504 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5505 		m_descriptorSets.resize(descriptorCount, (vk::VkDescriptorSet)0);
5506 		m_bufferViews.resize(descriptorCount, (vk::VkBufferView)0);
5507 	}
5508 
5509 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5510 	{
5511 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
5512 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
5513 		{
5514 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5515 			DE_NULL,
5516 
5517 			*m_descriptorPool,
5518 			1,
5519 			&layout
5520 		};
5521 
5522 		m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
5523 
5524 		{
5525 			const vk::VkBufferViewCreateInfo createInfo =
5526 			{
5527 				vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
5528 				DE_NULL,
5529 				0u,
5530 
5531 				context.getBuffer(),
5532 				vk::VK_FORMAT_R32_UINT,
5533 				descriptorSetNdx * m_maxStorageTexelCount * 4,
5534 				(deUint32)de::min<vk::VkDeviceSize>(m_maxStorageTexelCount * 4, m_bufferSize - descriptorSetNdx * m_maxStorageTexelCount * 4)
5535 			};
5536 
5537 			VK_CHECK(vkd.createBufferView(device, &createInfo, DE_NULL, &m_bufferViews[descriptorSetNdx]));
5538 		}
5539 
5540 		{
5541 			const vk::VkWriteDescriptorSet			write		=
5542 			{
5543 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5544 				DE_NULL,
5545 				m_descriptorSets[descriptorSetNdx],
5546 				0u,
5547 				0u,
5548 				1u,
5549 				vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
5550 				DE_NULL,
5551 				DE_NULL,
5552 				&m_bufferViews[descriptorSetNdx]
5553 			};
5554 
5555 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
5556 		}
5557 	}
5558 }
5559 
5560 void RenderVertexStorageTexelBuffer::submit (SubmitContext& context)
5561 {
5562 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
5563 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
5564 
5565 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5566 
5567 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5568 	{
5569 		const deUint32 count	= (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxStorageTexelCount * 4
5570 								? m_bufferSize - descriptorSetNdx * m_maxStorageTexelCount * 4
5571 								: m_maxStorageTexelCount * 4) / 2;
5572 
5573 		vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
5574 		vkd.cmdDraw(commandBuffer, count, 1, 0, 0);
5575 	}
5576 }
5577 
5578 void RenderVertexStorageTexelBuffer::verify (VerifyRenderPassContext& context, size_t)
5579 {
5580 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
5581 	{
5582 		const size_t	offset	= descriptorSetNdx * m_maxStorageTexelCount * 4;
5583 		const deUint32	count	= (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxStorageTexelCount * 4
5584 								? m_bufferSize - descriptorSetNdx * m_maxStorageTexelCount * 4
5585 								: m_maxStorageTexelCount * 4) / 2;
5586 
5587 		DE_ASSERT(context.getReference().getSize() <= 4 * m_maxStorageTexelCount * m_descriptorSets.size());
5588 		DE_ASSERT(context.getReference().getSize() > offset);
5589 		DE_ASSERT(offset + count * 2 <= context.getReference().getSize());
5590 
5591 		for (size_t pos = 0; pos < (size_t)count; pos++)
5592 		{
5593 			const deUint8 x = context.getReference().get(offset + pos * 2);
5594 			const deUint8 y = context.getReference().get(offset + (pos * 2) + 1);
5595 
5596 			context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), x, y);
5597 		}
5598 	}
5599 }
5600 
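// Reads the test image as a storage image from the vertex shader; each texel's RGBA
// channels encode two (x, y) positions that are marked white in the reference target.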
5601 class RenderVertexStorageImage : public RenderPassCommand
5602 {
5603 public:
5604 				RenderVertexStorageImage	(void) {}
5605 				~RenderVertexStorageImage	(void);
5606 
5607 	const char*	getName						(void) const { return "RenderVertexStorageImage"; }
5608 	void		logPrepare					(TestLog&, size_t) const;
5609 	void		logSubmit					(TestLog&, size_t) const;
5610 	void		prepare						(PrepareRenderPassContext&);
5611 	void		submit						(SubmitContext& context);
5612 	void		verify						(VerifyRenderPassContext&, size_t);
5613 
5614 private:
5615 	PipelineResources				m_resources;
5616 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
5617 	vk::Move<vk::VkDescriptorSet>	m_descriptorSet;
5618 	vk::Move<vk::VkImageView>		m_imageView;
5619 };
5620 
5621 RenderVertexStorageImage::~RenderVertexStorageImage (void)
5622 {
5623 }
5624 
5625 void RenderVertexStorageImage::logPrepare (TestLog& log, size_t commandIndex) const
5626 {
5627 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render storage image." << TestLog::EndMessage;
5628 }
5629 
5630 void RenderVertexStorageImage::logSubmit (TestLog& log, size_t commandIndex) const
5631 {
5632 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using storage image." << TestLog::EndMessage;
5633 }
5634 
5635 void RenderVertexStorageImage::prepare (PrepareRenderPassContext& context)
5636 {
5637 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
5638 	const vk::VkDevice							device					= context.getContext().getDevice();
5639 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
5640 	const deUint32								subpass					= 0;
5641 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-image.vert"), 0));
5642 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5643 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
5644 
5645 	{
5646 		const vk::VkDescriptorSetLayoutBinding binding =
5647 		{
5648 			0u,
5649 			vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
5650 			1,
5651 			vk::VK_SHADER_STAGE_VERTEX_BIT,
5652 			DE_NULL
5653 		};
5654 
5655 		bindings.push_back(binding);
5656 	}
5657 
5658 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5659 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
5660 
5661 	{
5662 		const vk::VkDescriptorPoolSize			poolSizes		=
5663 		{
5664 			vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
5665 			1
5666 		};
5667 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
5668 		{
5669 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5670 			DE_NULL,
5671 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5672 
5673 			1u,
5674 			1u,
5675 			&poolSizes,
5676 		};
5677 
5678 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5679 	}
5680 
5681 	{
5682 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
5683 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
5684 		{
5685 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5686 			DE_NULL,
5687 
5688 			*m_descriptorPool,
5689 			1,
5690 			&layout
5691 		};
5692 
5693 		m_descriptorSet = vk::allocateDescriptorSet(vkd, device, &allocateInfo);
5694 
5695 		{
5696 			const vk::VkImageViewCreateInfo createInfo =
5697 			{
5698 				vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
5699 				DE_NULL,
5700 				0u,
5701 
5702 				context.getImage(),
5703 				vk::VK_IMAGE_VIEW_TYPE_2D,
5704 				vk::VK_FORMAT_R8G8B8A8_UNORM,
5705 				vk::makeComponentMappingRGBA(),
5706 				{
5707 					vk::VK_IMAGE_ASPECT_COLOR_BIT,
5708 					0u,
5709 					1u,
5710 					0u,
5711 					1u
5712 				}
5713 			};
5714 
5715 			m_imageView = vk::createImageView(vkd, device, &createInfo);
5716 		}
5717 
5718 		{
5719 			const vk::VkDescriptorImageInfo			imageInfo	=
5720 			{
5721 				0,
5722 				*m_imageView,
5723 				context.getImageLayout()
5724 			};
5725 			const vk::VkWriteDescriptorSet			write		=
5726 			{
5727 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5728 				DE_NULL,
5729 				*m_descriptorSet,
5730 				0u,
5731 				0u,
5732 				1u,
5733 				vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
5734 				&imageInfo,
5735 				DE_NULL,
5736 				DE_NULL,
5737 			};
5738 
5739 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
5740 		}
5741 	}
5742 }
5743 
5744 void RenderVertexStorageImage::submit (SubmitContext& context)
5745 {
5746 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
5747 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
5748 
5749 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5750 
5751 	vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &(*m_descriptorSet), 0u, DE_NULL);
5752 	vkd.cmdDraw(commandBuffer, context.getImageWidth() * context.getImageHeight() * 2, 1, 0, 0);
5753 }
5754 
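// Update the reference target the same way the vertex shader presumably consumes the storage
// image: every texel produces two point primitives, and the (x, y) and (z, w) components of the
// texel give the framebuffer coordinates that are written as white.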
5755 void RenderVertexStorageImage::verify (VerifyRenderPassContext& context, size_t)
5756 {
5757 	for (int pos = 0; pos < (int)(context.getReferenceImage().getWidth() * context.getReferenceImage().getHeight() * 2); pos++)
5758 	{
5759 		const tcu::IVec3		size	= context.getReferenceImage().getAccess().getSize();
5760 		const tcu::UVec4		pixel	= context.getReferenceImage().getAccess().getPixelUint((pos / 2) / size.x(), (pos / 2) % size.x());
5761 
5762 		if (pos % 2 == 0)
5763 			context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), pixel.x(), pixel.y());
5764 		else
5765 			context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), pixel.z(), pixel.w());
5766 	}
5767 }
5768 
5769 class RenderVertexSampledImage : public RenderPassCommand
5770 {
5771 public:
5772 				RenderVertexSampledImage	(void) {}
5773 				~RenderVertexSampledImage	(void);
5774 
5775 	const char*	getName						(void) const { return "RenderVertexSampledImage"; }
5776 	void		logPrepare					(TestLog&, size_t) const;
5777 	void		logSubmit					(TestLog&, size_t) const;
5778 	void		prepare						(PrepareRenderPassContext&);
5779 	void		submit						(SubmitContext& context);
5780 	void		verify						(VerifyRenderPassContext&, size_t);
5781 
5782 private:
5783 	PipelineResources				m_resources;
5784 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
5785 	vk::Move<vk::VkDescriptorSet>	m_descriptorSet;
5786 	vk::Move<vk::VkImageView>		m_imageView;
5787 	vk::Move<vk::VkSampler>			m_sampler;
5788 };
5789 
5790 RenderVertexSampledImage::~RenderVertexSampledImage (void)
5791 {
5792 }
5793 
5794 void RenderVertexSampledImage::logPrepare (TestLog& log, size_t commandIndex) const
5795 {
5796 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render sampled image." << TestLog::EndMessage;
5797 }
5798 
5799 void RenderVertexSampledImage::logSubmit (TestLog& log, size_t commandIndex) const
5800 {
5801 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using sampled image." << TestLog::EndMessage;
5802 }
5803 
5804 void RenderVertexSampledImage::prepare (PrepareRenderPassContext& context)
5805 {
5806 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
5807 	const vk::VkDevice							device					= context.getContext().getDevice();
5808 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
5809 	const deUint32								subpass					= 0;
5810 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("sampled-image.vert"), 0));
5811 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-white.frag"), 0));
5812 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
5813 
5814 	{
5815 		const vk::VkDescriptorSetLayoutBinding binding =
5816 		{
5817 			0u,
5818 			vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
5819 			1,
5820 			vk::VK_SHADER_STAGE_VERTEX_BIT,
5821 			DE_NULL
5822 		};
5823 
5824 		bindings.push_back(binding);
5825 	}
5826 
5827 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
5828 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_POINT_LIST, 0u, DE_NULL, m_resources);
5829 
5830 	{
5831 		const vk::VkDescriptorPoolSize			poolSizes		=
5832 		{
5833 			vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
5834 			1
5835 		};
5836 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
5837 		{
5838 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
5839 			DE_NULL,
5840 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
5841 
5842 			1u,
5843 			1u,
5844 			&poolSizes,
5845 		};
5846 
5847 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
5848 	}
5849 
5850 	{
5851 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
5852 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
5853 		{
5854 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
5855 			DE_NULL,
5856 
5857 			*m_descriptorPool,
5858 			1,
5859 			&layout
5860 		};
5861 
5862 		m_descriptorSet = vk::allocateDescriptorSet(vkd, device, &allocateInfo);
5863 
5864 		{
5865 			const vk::VkImageViewCreateInfo createInfo =
5866 			{
5867 				vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
5868 				DE_NULL,
5869 				0u,
5870 
5871 				context.getImage(),
5872 				vk::VK_IMAGE_VIEW_TYPE_2D,
5873 				vk::VK_FORMAT_R8G8B8A8_UNORM,
5874 				vk::makeComponentMappingRGBA(),
5875 				{
5876 					vk::VK_IMAGE_ASPECT_COLOR_BIT,
5877 					0u,
5878 					1u,
5879 					0u,
5880 					1u
5881 				}
5882 			};
5883 
5884 			m_imageView = vk::createImageView(vkd, device, &createInfo);
5885 		}
5886 
5887 		{
5888 			const vk::VkSamplerCreateInfo createInfo =
5889 			{
5890 				vk::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
5891 				DE_NULL,
5892 				0u,
5893 
5894 				vk::VK_FILTER_NEAREST,
5895 				vk::VK_FILTER_NEAREST,
5896 
5897 				vk::VK_SAMPLER_MIPMAP_MODE_LINEAR,
5898 				vk::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
5899 				vk::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
5900 				vk::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
5901 				0.0f,
5902 				VK_FALSE,
5903 				1.0f,
5904 				VK_FALSE,
5905 				vk::VK_COMPARE_OP_ALWAYS,
5906 				0.0f,
5907 				0.0f,
5908 				vk::VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
5909 				VK_FALSE
5910 			};
5911 
5912 			m_sampler = vk::createSampler(vkd, device, &createInfo);
5913 		}
5914 
5915 		{
5916 			const vk::VkDescriptorImageInfo			imageInfo	=
5917 			{
5918 				*m_sampler,
5919 				*m_imageView,
5920 				context.getImageLayout()
5921 			};
5922 			const vk::VkWriteDescriptorSet			write		=
5923 			{
5924 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
5925 				DE_NULL,
5926 				*m_descriptorSet,
5927 				0u,
5928 				0u,
5929 				1u,
5930 				vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
5931 				&imageInfo,
5932 				DE_NULL,
5933 				DE_NULL,
5934 			};
5935 
5936 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
5937 		}
5938 	}
5939 }
5940 
5941 void RenderVertexSampledImage::submit (SubmitContext& context)
5942 {
5943 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
5944 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
5945 
5946 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
5947 
5948 	vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &(*m_descriptorSet), 0u, DE_NULL);
5949 	vkd.cmdDraw(commandBuffer, context.getImageWidth() * context.getImageHeight() * 2, 1, 0, 0);
5950 }
5951 
5952 void RenderVertexSampledImage::verify (VerifyRenderPassContext& context, size_t)
5953 {
5954 	for (int pos = 0; pos < (int)(context.getReferenceImage().getWidth() * context.getReferenceImage().getHeight() * 2); pos++)
5955 	{
5956 		const tcu::IVec3	size	= context.getReferenceImage().getAccess().getSize();
5957 		const tcu::UVec4	pixel	= context.getReferenceImage().getAccess().getPixelUint((pos / 2) / size.x(), (pos / 2) % size.x());
5958 
5959 		if (pos % 2 == 0)
5960 			context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), pixel.x(), pixel.y());
5961 		else
5962 			context.getReferenceTarget().getAccess().setPixel(Vec4(1.0f, 1.0f, 1.0f, 1.0f), pixel.z(), pixel.w());
5963 	}
5964 }
5965 
5966 class RenderFragmentUniformBuffer : public RenderPassCommand
5967 {
5968 public:
5969 									RenderFragmentUniformBuffer		(void) {}
5970 									~RenderFragmentUniformBuffer	(void);
5971 
5972 	const char*						getName							(void) const { return "RenderFragmentUniformBuffer"; }
5973 	void							logPrepare						(TestLog&, size_t) const;
5974 	void							logSubmit						(TestLog&, size_t) const;
5975 	void							prepare							(PrepareRenderPassContext&);
5976 	void							submit							(SubmitContext& context);
5977 	void							verify							(VerifyRenderPassContext&, size_t);
5978 
5979 private:
5980 	PipelineResources				m_resources;
5981 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
5982 	vector<vk::VkDescriptorSet>		m_descriptorSets;
5983 
5984 	vk::VkDeviceSize				m_bufferSize;
5985 	size_t							m_targetWidth;
5986 	size_t							m_targetHeight;
5987 };
5988 
5989 RenderFragmentUniformBuffer::~RenderFragmentUniformBuffer (void)
5990 {
5991 }
5992 
5993 void RenderFragmentUniformBuffer::logPrepare (TestLog& log, size_t commandIndex) const
5994 {
5995 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as uniform buffer." << TestLog::EndMessage;
5996 }
5997 
5998 void RenderFragmentUniformBuffer::logSubmit (TestLog& log, size_t commandIndex) const
5999 {
6000 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as uniform buffer." << TestLog::EndMessage;
6001 }
6002 
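// The buffer under test is bound in MAX_UNIFORM_BUFFER_SIZE (1 KiB) slices, one descriptor set
// per slice; m_bufferSize itself is clamped to MAX_SIZE so the slice count stays bounded.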
6003 void RenderFragmentUniformBuffer::prepare (PrepareRenderPassContext& context)
6004 {
6005 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
6006 	const vk::VkDevice							device					= context.getContext().getDevice();
6007 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
6008 	const deUint32								subpass					= 0;
6009 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-quad.vert"), 0));
6010 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("uniform-buffer.frag"), 0));
6011 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
6012 
6013 	m_bufferSize	= de::min(context.getBufferSize(), (vk::VkDeviceSize)MAX_SIZE);
6014 	m_targetWidth	= context.getTargetWidth();
6015 	m_targetHeight	= context.getTargetHeight();
6016 
6017 	{
6018 		const vk::VkDescriptorSetLayoutBinding binding =
6019 		{
6020 			0u,
6021 			vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
6022 			1,
6023 			vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6024 			DE_NULL
6025 		};
6026 
6027 		bindings.push_back(binding);
6028 	}
6029 	const vk::VkPushConstantRange pushConstantRange =
6030 	{
6031 		vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6032 		0u,
6033 		8u
6034 	};
6035 
6036 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
6037 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 1u, &pushConstantRange, m_resources);
6038 
6039 	{
6040 		const deUint32							descriptorCount	= (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE));
6041 		const vk::VkDescriptorPoolSize			poolSizes		=
6042 		{
6043 			vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
6044 			descriptorCount
6045 		};
6046 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
6047 		{
6048 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
6049 			DE_NULL,
6050 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
6051 
6052 			descriptorCount,
6053 			1u,
6054 			&poolSizes,
6055 		};
6056 
6057 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
6058 		m_descriptorSets.resize(descriptorCount);
6059 	}
6060 
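	// Allocate one descriptor set per slice and point it at the matching sub-range of the buffer.
	// The last slice may be shorter than MAX_UNIFORM_BUFFER_SIZE if the buffer size is not a
	// multiple of the slice size.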
6061 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6062 	{
6063 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
6064 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
6065 		{
6066 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
6067 			DE_NULL,
6068 
6069 			*m_descriptorPool,
6070 			1,
6071 			&layout
6072 		};
6073 
6074 		m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
6075 
6076 		{
6077 			const vk::VkDescriptorBufferInfo		bufferInfo	=
6078 			{
6079 				context.getBuffer(),
6080 				(vk::VkDeviceSize)(descriptorSetNdx * (size_t)MAX_UNIFORM_BUFFER_SIZE),
6081 				m_bufferSize < (descriptorSetNdx + 1) * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
6082 					? m_bufferSize - descriptorSetNdx * (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
6083 					: (vk::VkDeviceSize)MAX_UNIFORM_BUFFER_SIZE
6084 			};
6085 			const vk::VkWriteDescriptorSet			write		=
6086 			{
6087 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
6088 				DE_NULL,
6089 				m_descriptorSets[descriptorSetNdx],
6090 				0u,
6091 				0u,
6092 				1u,
6093 				vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
6094 				DE_NULL,
6095 				&bufferInfo,
6096 				DE_NULL,
6097 			};
6098 
6099 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
6100 		}
6101 	}
6102 }
6103 
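// One draw per descriptor set; the push constants tell the fragment shader which slice is bound
// (callId) and how many buffer values map onto a single output pixel (valuesPerPixel).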
6104 void RenderFragmentUniformBuffer::submit (SubmitContext& context)
6105 {
6106 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
6107 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
6108 
6109 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
6110 
6111 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6112 	{
6113 		const struct
6114 		{
6115 			const deUint32	callId;
6116 			const deUint32	valuesPerPixel;
6117 		} callParams =
6118 		{
6119 			(deUint32)descriptorSetNdx,
6120 			(deUint32)divRoundUp<size_t>(m_descriptorSets.size() * (MAX_UNIFORM_BUFFER_SIZE / 4), m_targetWidth * m_targetHeight)
6121 		};
6122 
6123 		vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
6124 		vkd.cmdPushConstants(commandBuffer, *m_resources.pipelineLayout, vk::VK_SHADER_STAGE_FRAGMENT_BIT, 0u, (deUint32)sizeof(callParams), &callParams);
6125 		vkd.cmdDraw(commandBuffer, 6, 1, 0, 0);
6126 	}
6127 }
6128 
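// Recompute the expected framebuffer contents on the CPU: starting from the pixel index, chase
// valuesPerPixel 32-bit values through the reference buffer data and unpack the final value into
// an RGBA color, mirroring what the fragment shader is expected to do.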
6129 void RenderFragmentUniformBuffer::verify (VerifyRenderPassContext& context, size_t)
6130 {
6131 	const deUint32	valuesPerPixel	= (deUint32)divRoundUp<size_t>(m_descriptorSets.size() * (MAX_UNIFORM_BUFFER_SIZE / 4), m_targetWidth * m_targetHeight);
6132 	const size_t	arraySize		= MAX_UNIFORM_BUFFER_SIZE / (sizeof(deUint32) * 4);
6133 	const size_t	arrayIntSize	= arraySize * 4;
6134 
6135 	for (int y = 0; y < context.getReferenceTarget().getSize().y(); y++)
6136 	for (int x = 0; x < context.getReferenceTarget().getSize().x(); x++)
6137 	{
6138 		const size_t firstDescriptorSetNdx = de::min<size_t>((y * 256u + x) / (arrayIntSize / valuesPerPixel), m_descriptorSets.size() - 1);
6139 
6140 		for (size_t descriptorSetNdx = firstDescriptorSetNdx; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6141 		{
6142 			const size_t	offset	= descriptorSetNdx * MAX_UNIFORM_BUFFER_SIZE;
6143 			const deUint32	callId	= (deUint32)descriptorSetNdx;
6144 
6145 			const deUint32	id		= callId * ((deUint32)arrayIntSize / valuesPerPixel) + (deUint32)y * 256u + (deUint32)x;
6146 
6147 			if (y * 256u + x < callId * (arrayIntSize / valuesPerPixel))
6148 				continue;
6149 			else
6150 			{
6151 				deUint32 value = id;
6152 
6153 				for (deUint32 i = 0; i < valuesPerPixel; i++)
6154 				{
6155 					value	= ((deUint32)context.getReference().get(offset + (value % (MAX_UNIFORM_BUFFER_SIZE / sizeof(deUint32))) * 4 + 0))
6156 							| (((deUint32)context.getReference().get(offset + (value % (MAX_UNIFORM_BUFFER_SIZE / sizeof(deUint32))) * 4 + 1)) << 8u)
6157 							| (((deUint32)context.getReference().get(offset + (value % (MAX_UNIFORM_BUFFER_SIZE / sizeof(deUint32))) * 4 + 2)) << 16u)
6158 							| (((deUint32)context.getReference().get(offset + (value % (MAX_UNIFORM_BUFFER_SIZE / sizeof(deUint32))) * 4 + 3)) << 24u);
6159 
6160 				}
6161 				const UVec4	vec	((value >>  0u) & 0xFFu,
6162 								 (value >>  8u) & 0xFFu,
6163 								 (value >> 16u) & 0xFFu,
6164 								 (value >> 24u) & 0xFFu);
6165 
6166 				context.getReferenceTarget().getAccess().setPixel(vec.asFloat() / Vec4(255.0f), x, y);
6167 			}
6168 		}
6169 	}
6170 }
6171 
6172 class RenderFragmentStorageBuffer : public RenderPassCommand
6173 {
6174 public:
6175 									RenderFragmentStorageBuffer		(void) {}
6176 									~RenderFragmentStorageBuffer	(void);
6177 
6178 	const char*						getName							(void) const { return "RenderFragmentStorageBuffer"; }
6179 	void							logPrepare						(TestLog&, size_t) const;
6180 	void							logSubmit						(TestLog&, size_t) const;
6181 	void							prepare							(PrepareRenderPassContext&);
6182 	void							submit							(SubmitContext& context);
6183 	void							verify							(VerifyRenderPassContext&, size_t);
6184 
6185 private:
6186 	PipelineResources				m_resources;
6187 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
6188 	vk::Move<vk::VkDescriptorSet>	m_descriptorSet;
6189 
6190 	vk::VkDeviceSize				m_bufferSize;
6191 	size_t							m_targetWidth;
6192 	size_t							m_targetHeight;
6193 };
6194 
6195 RenderFragmentStorageBuffer::~RenderFragmentStorageBuffer (void)
6196 {
6197 }
6198 
6199 void RenderFragmentStorageBuffer::logPrepare (TestLog& log, size_t commandIndex) const
6200 {
6201 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline to render buffer as storage buffer." << TestLog::EndMessage;
6202 }
6203 
6204 void RenderFragmentStorageBuffer::logSubmit (TestLog& log, size_t commandIndex) const
6205 {
6206 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as storage buffer." << TestLog::EndMessage;
6207 }
6208 
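// Unlike the uniform-buffer case, the whole buffer fits into a single storage-buffer descriptor,
// so only one descriptor set is needed and no slicing is done here.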
6209 void RenderFragmentStorageBuffer::prepare (PrepareRenderPassContext& context)
6210 {
6211 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
6212 	const vk::VkDevice							device					= context.getContext().getDevice();
6213 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
6214 	const deUint32								subpass					= 0;
6215 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-quad.vert"), 0));
6216 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-buffer.frag"), 0));
6217 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
6218 
6219 	m_bufferSize	= context.getBufferSize();
6220 	m_targetWidth	= context.getTargetWidth();
6221 	m_targetHeight	= context.getTargetHeight();
6222 
6223 	{
6224 		const vk::VkDescriptorSetLayoutBinding binding =
6225 		{
6226 			0u,
6227 			vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
6228 			1,
6229 			vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6230 			DE_NULL
6231 		};
6232 
6233 		bindings.push_back(binding);
6234 	}
6235 	const vk::VkPushConstantRange pushConstantRange =
6236 	{
6237 		vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6238 		0u,
6239 		12u
6240 	};
6241 
6242 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
6243 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 1u, &pushConstantRange, m_resources);
6244 
6245 	{
6246 		const deUint32							descriptorCount	= 1;
6247 		const vk::VkDescriptorPoolSize			poolSizes		=
6248 		{
6249 			vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
6250 			descriptorCount
6251 		};
6252 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
6253 		{
6254 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
6255 			DE_NULL,
6256 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
6257 
6258 			descriptorCount,
6259 			1u,
6260 			&poolSizes,
6261 		};
6262 
6263 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
6264 	}
6265 
6266 	{
6267 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
6268 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
6269 		{
6270 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
6271 			DE_NULL,
6272 
6273 			*m_descriptorPool,
6274 			1,
6275 			&layout
6276 		};
6277 
6278 		m_descriptorSet = vk::allocateDescriptorSet(vkd, device, &allocateInfo);
6279 
6280 		{
6281 			const vk::VkDescriptorBufferInfo	bufferInfo	=
6282 			{
6283 				context.getBuffer(),
6284 				0u,
6285 				m_bufferSize
6286 			};
6287 			const vk::VkWriteDescriptorSet		write		=
6288 			{
6289 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
6290 				DE_NULL,
6291 				m_descriptorSet.get(),
6292 				0u,
6293 				0u,
6294 				1u,
6295 				vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
6296 				DE_NULL,
6297 				&bufferInfo,
6298 				DE_NULL,
6299 			};
6300 
6301 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
6302 		}
6303 	}
6304 }
6305 
6306 void RenderFragmentStorageBuffer::submit (SubmitContext& context)
6307 {
6308 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
6309 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
6310 
6311 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
6312 
6313 	const struct
6314 	{
6315 		const deUint32	valuesPerPixel;
6316 		const deUint32	bufferSize;
6317 	} callParams =
6318 	{
6319 		(deUint32)divRoundUp<vk::VkDeviceSize>(m_bufferSize / 4, m_targetWidth * m_targetHeight),
6320 		(deUint32)m_bufferSize
6321 	};
6322 
6323 	vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSet.get(), 0u, DE_NULL);
6324 	vkd.cmdPushConstants(commandBuffer, *m_resources.pipelineLayout, vk::VK_SHADER_STAGE_FRAGMENT_BIT, 0u, (deUint32)sizeof(callParams), &callParams);
6325 	vkd.cmdDraw(commandBuffer, 6, 1, 0, 0);
6326 }
6327 
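// CPU-side reference of the fragment shader output: for each pixel, follow the value chain
// through the whole buffer and unpack the final 32-bit value into an RGBA color.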
6328 void RenderFragmentStorageBuffer::verify (VerifyRenderPassContext& context, size_t)
6329 {
6330 	const deUint32	valuesPerPixel	= (deUint32)divRoundUp<vk::VkDeviceSize>(m_bufferSize / 4, m_targetWidth * m_targetHeight);
6331 
6332 	for (int y = 0; y < context.getReferenceTarget().getSize().y(); y++)
6333 	for (int x = 0; x < context.getReferenceTarget().getSize().x(); x++)
6334 	{
6335 		const deUint32	id		= (deUint32)y * 256u + (deUint32)x;
6336 
6337 		deUint32 value = id;
6338 
6339 		for (deUint32 i = 0; i < valuesPerPixel; i++)
6340 		{
6341 			value	= (((deUint32)context.getReference().get((size_t)(value % (m_bufferSize / sizeof(deUint32))) * 4 + 0)) << 0u)
6342 					| (((deUint32)context.getReference().get((size_t)(value % (m_bufferSize / sizeof(deUint32))) * 4 + 1)) << 8u)
6343 					| (((deUint32)context.getReference().get((size_t)(value % (m_bufferSize / sizeof(deUint32))) * 4 + 2)) << 16u)
6344 					| (((deUint32)context.getReference().get((size_t)(value % (m_bufferSize / sizeof(deUint32))) * 4 + 3)) << 24u);
6345 
6346 		}
6347 		const UVec4	vec	((value >>  0u) & 0xFFu,
6348 						 (value >>  8u) & 0xFFu,
6349 						 (value >> 16u) & 0xFFu,
6350 						 (value >> 24u) & 0xFFu);
6351 
6352 		context.getReferenceTarget().getAccess().setPixel(vec.asFloat() / Vec4(255.0f), x, y);
6353 	}
6354 }
6355 
6356 class RenderFragmentUniformTexelBuffer : public RenderPassCommand
6357 {
6358 public:
6359 									RenderFragmentUniformTexelBuffer	(void) {}
6360 									~RenderFragmentUniformTexelBuffer	(void);
6361 
6362 	const char*						getName								(void) const { return "RenderFragmentUniformTexelBuffer"; }
6363 	void							logPrepare							(TestLog&, size_t) const;
6364 	void							logSubmit							(TestLog&, size_t) const;
6365 	void							prepare								(PrepareRenderPassContext&);
6366 	void							submit								(SubmitContext& context);
6367 	void							verify								(VerifyRenderPassContext&, size_t);
6368 
6369 private:
6370 	PipelineResources				m_resources;
6371 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
6372 	vector<vk::VkDescriptorSet>		m_descriptorSets;
6373 	vector<vk::VkBufferView>		m_bufferViews;
6374 
6375 	const vk::DeviceInterface*		m_vkd;
6376 	vk::VkDevice					m_device;
6377 	vk::VkDeviceSize				m_bufferSize;
6378 	deUint32						m_maxUniformTexelCount;
6379 	size_t							m_targetWidth;
6380 	size_t							m_targetHeight;
6381 };
6382 
6383 RenderFragmentUniformTexelBuffer::~RenderFragmentUniformTexelBuffer (void)
6384 {
6385 	for (size_t bufferViewNdx = 0; bufferViewNdx < m_bufferViews.size(); bufferViewNdx++)
6386 	{
6387 		if (!!m_bufferViews[bufferViewNdx])
6388 		{
6389 			m_vkd->destroyBufferView(m_device, m_bufferViews[bufferViewNdx], DE_NULL);
6390 			m_bufferViews[bufferViewNdx] = (vk::VkBufferView)0;
6391 		}
6392 	}
6393 }
6394 
6395 void RenderFragmentUniformTexelBuffer::logPrepare (TestLog& log, size_t commandIndex) const
6396 {
6397 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as uniform texel buffer." << TestLog::EndMessage;
6398 }
6399 
6400 void RenderFragmentUniformTexelBuffer::logSubmit (TestLog& log, size_t commandIndex) const
6401 {
6402 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as uniform texel buffer." << TestLog::EndMessage;
6403 }
6404 
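// A texel buffer view may cover at most maxTexelBufferElements texels, so the buffer is split
// into R32_UINT views of that size, each bound through its own descriptor set. The raw buffer
// views are destroyed manually in the destructor.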
6405 void RenderFragmentUniformTexelBuffer::prepare (PrepareRenderPassContext& context)
6406 {
6407 	const vk::InstanceInterface&				vki						= context.getContext().getInstanceInterface();
6408 	const vk::VkPhysicalDevice					physicalDevice			= context.getContext().getPhysicalDevice();
6409 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
6410 	const vk::VkDevice							device					= context.getContext().getDevice();
6411 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
6412 	const deUint32								subpass					= 0;
6413 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-quad.vert"), 0));
6414 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("uniform-texel-buffer.frag"), 0));
6415 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
6416 
6417 	m_device				= device;
6418 	m_vkd					= &vkd;
6419 	m_bufferSize			= context.getBufferSize();
6420 	m_maxUniformTexelCount	= vk::getPhysicalDeviceProperties(vki, physicalDevice).limits.maxTexelBufferElements;
6421 	m_targetWidth			= context.getTargetWidth();
6422 	m_targetHeight			= context.getTargetHeight();
6423 
6424 	{
6425 		const vk::VkDescriptorSetLayoutBinding binding =
6426 		{
6427 			0u,
6428 			vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
6429 			1,
6430 			vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6431 			DE_NULL
6432 		};
6433 
6434 		bindings.push_back(binding);
6435 	}
6436 	const vk::VkPushConstantRange pushConstantRange =
6437 	{
6438 		vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6439 		0u,
6440 		12u
6441 	};
6442 
6443 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
6444 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 1u, &pushConstantRange, m_resources);
6445 
6446 	{
6447 		const deUint32							descriptorCount	= (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)m_maxUniformTexelCount * 4));
6448 		const vk::VkDescriptorPoolSize			poolSizes		=
6449 		{
6450 			vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
6451 			descriptorCount
6452 		};
6453 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
6454 		{
6455 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
6456 			DE_NULL,
6457 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
6458 
6459 			descriptorCount,
6460 			1u,
6461 			&poolSizes,
6462 		};
6463 
6464 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
6465 		m_descriptorSets.resize(descriptorCount, (vk::VkDescriptorSet)0);
6466 		m_bufferViews.resize(descriptorCount, (vk::VkBufferView)0);
6467 	}
6468 
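	// The last view may contain fewer texels when the buffer size is not a multiple of
	// m_maxUniformTexelCount * 4 bytes.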
6469 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6470 	{
6471 		const deUint32							count			= (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 4
6472 																? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 4
6473 																: m_maxUniformTexelCount * 4) / 4;
6474 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
6475 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
6476 		{
6477 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
6478 			DE_NULL,
6479 
6480 			*m_descriptorPool,
6481 			1,
6482 			&layout
6483 		};
6484 
6485 		m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
6486 
6487 		{
6488 			const vk::VkBufferViewCreateInfo createInfo =
6489 			{
6490 				vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
6491 				DE_NULL,
6492 				0u,
6493 
6494 				context.getBuffer(),
6495 				vk::VK_FORMAT_R32_UINT,
6496 				descriptorSetNdx * m_maxUniformTexelCount * 4,
6497 				count * 4
6498 			};
6499 
6500 			VK_CHECK(vkd.createBufferView(device, &createInfo, DE_NULL, &m_bufferViews[descriptorSetNdx]));
6501 		}
6502 
6503 		{
6504 			const vk::VkWriteDescriptorSet			write		=
6505 			{
6506 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
6507 				DE_NULL,
6508 				m_descriptorSets[descriptorSetNdx],
6509 				0u,
6510 				0u,
6511 				1u,
6512 				vk::VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
6513 				DE_NULL,
6514 				DE_NULL,
6515 				&m_bufferViews[descriptorSetNdx]
6516 			};
6517 
6518 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
6519 		}
6520 	}
6521 }
6522 
6523 void RenderFragmentUniformTexelBuffer::submit (SubmitContext& context)
6524 {
6525 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
6526 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
6527 
6528 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
6529 
6530 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6531 	{
6532 		const struct
6533 		{
6534 			const deUint32	callId;
6535 			const deUint32	valuesPerPixel;
6536 			const deUint32	maxUniformTexelCount;
6537 		} callParams =
6538 		{
6539 			(deUint32)descriptorSetNdx,
6540 			(deUint32)divRoundUp<size_t>(m_descriptorSets.size() * de::min<size_t>((size_t)m_bufferSize / 4, m_maxUniformTexelCount), m_targetWidth * m_targetHeight),
6541 			m_maxUniformTexelCount
6542 		};
6543 
6544 		vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
6545 		vkd.cmdPushConstants(commandBuffer, *m_resources.pipelineLayout, vk::VK_SHADER_STAGE_FRAGMENT_BIT, 0u, (deUint32)sizeof(callParams), &callParams);
6546 		vkd.cmdDraw(commandBuffer, 6, 1, 0, 0);
6547 	}
6548 }
6549 
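// Reference computation matching what the fragment shader is expected to do: each pixel chases
// valuesPerPixel values through the texel buffer view it falls into, wrapping within that view's
// texel count.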
6550 void RenderFragmentUniformTexelBuffer::verify (VerifyRenderPassContext& context, size_t)
6551 {
6552 	const deUint32	valuesPerPixel	= (deUint32)divRoundUp<size_t>(m_descriptorSets.size() * de::min<size_t>((size_t)m_bufferSize / 4, m_maxUniformTexelCount), m_targetWidth * m_targetHeight);
6553 
6554 	for (int y = 0; y < context.getReferenceTarget().getSize().y(); y++)
6555 	for (int x = 0; x < context.getReferenceTarget().getSize().x(); x++)
6556 	{
6557 		const size_t firstDescriptorSetNdx = de::min<size_t>((y * 256u + x) / (m_maxUniformTexelCount / valuesPerPixel), m_descriptorSets.size() - 1);
6558 
6559 		for (size_t descriptorSetNdx = firstDescriptorSetNdx; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6560 		{
6561 			const size_t	offset	= descriptorSetNdx * m_maxUniformTexelCount * 4;
6562 			const deUint32	callId	= (deUint32)descriptorSetNdx;
6563 
6564 			const deUint32	id		= (deUint32)y * 256u + (deUint32)x;
6565 			const deUint32	count	= (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxUniformTexelCount * 4
6566 									? m_bufferSize - descriptorSetNdx * m_maxUniformTexelCount * 4
6567 									: m_maxUniformTexelCount * 4) / 4;
6568 
6569 			if (y * 256u + x < callId * (m_maxUniformTexelCount / valuesPerPixel))
6570 				continue;
6571 			else
6572 			{
6573 				deUint32 value = id;
6574 
6575 				for (deUint32 i = 0; i < valuesPerPixel; i++)
6576 				{
6577 					value	= ((deUint32)context.getReference().get(offset + (value % count) * 4 + 0))
6578 							| (((deUint32)context.getReference().get(offset + (value % count) * 4 + 1)) << 8u)
6579 							| (((deUint32)context.getReference().get(offset + (value % count) * 4 + 2)) << 16u)
6580 							| (((deUint32)context.getReference().get(offset + (value % count) * 4 + 3)) << 24u);
6581 
6582 				}
6583 				const UVec4	vec	((value >>  0u) & 0xFFu,
6584 								 (value >>  8u) & 0xFFu,
6585 								 (value >> 16u) & 0xFFu,
6586 								 (value >> 24u) & 0xFFu);
6587 
6588 				context.getReferenceTarget().getAccess().setPixel(vec.asFloat() / Vec4(255.0f), x, y);
6589 			}
6590 		}
6591 	}
6592 }
6593 
6594 class RenderFragmentStorageTexelBuffer : public RenderPassCommand
6595 {
6596 public:
6597 									RenderFragmentStorageTexelBuffer	(void) {}
6598 									~RenderFragmentStorageTexelBuffer	(void);
6599 
6600 	const char*						getName								(void) const { return "RenderFragmentStorageTexelBuffer"; }
6601 	void							logPrepare							(TestLog&, size_t) const;
6602 	void							logSubmit							(TestLog&, size_t) const;
6603 	void							prepare								(PrepareRenderPassContext&);
6604 	void							submit								(SubmitContext& context);
6605 	void							verify								(VerifyRenderPassContext&, size_t);
6606 
6607 private:
6608 	PipelineResources				m_resources;
6609 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
6610 	vector<vk::VkDescriptorSet>		m_descriptorSets;
6611 	vector<vk::VkBufferView>		m_bufferViews;
6612 
6613 	const vk::DeviceInterface*		m_vkd;
6614 	vk::VkDevice					m_device;
6615 	vk::VkDeviceSize				m_bufferSize;
6616 	deUint32						m_maxStorageTexelCount;
6617 	size_t							m_targetWidth;
6618 	size_t							m_targetHeight;
6619 };
6620 
6621 RenderFragmentStorageTexelBuffer::~RenderFragmentStorageTexelBuffer (void)
6622 {
6623 	for (size_t bufferViewNdx = 0; bufferViewNdx < m_bufferViews.size(); bufferViewNdx++)
6624 	{
6625 		if (!!m_bufferViews[bufferViewNdx])
6626 		{
6627 			m_vkd->destroyBufferView(m_device, m_bufferViews[bufferViewNdx], DE_NULL);
6628 			m_bufferViews[bufferViewNdx] = (vk::VkBufferView)0;
6629 		}
6630 	}
6631 }
6632 
6633 void RenderFragmentStorageTexelBuffer::logPrepare (TestLog& log, size_t commandIndex) const
6634 {
6635 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render buffer as storage texel buffer." << TestLog::EndMessage;
6636 }
6637 
6638 void RenderFragmentStorageTexelBuffer::logSubmit (TestLog& log, size_t commandIndex) const
6639 {
6640 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using buffer as storage texel buffer." << TestLog::EndMessage;
6641 }
6642 
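// Same splitting scheme as in the uniform texel buffer case, but using storage texel buffer
// descriptors and views of at most maxTexelBufferElements R32_UINT texels.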
6643 void RenderFragmentStorageTexelBuffer::prepare (PrepareRenderPassContext& context)
6644 {
6645 	const vk::InstanceInterface&				vki						= context.getContext().getInstanceInterface();
6646 	const vk::VkPhysicalDevice					physicalDevice			= context.getContext().getPhysicalDevice();
6647 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
6648 	const vk::VkDevice							device					= context.getContext().getDevice();
6649 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
6650 	const deUint32								subpass					= 0;
6651 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-quad.vert"), 0));
6652 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-texel-buffer.frag"), 0));
6653 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
6654 
6655 	m_device				= device;
6656 	m_vkd					= &vkd;
6657 	m_bufferSize			= context.getBufferSize();
6658 	m_maxStorageTexelCount	= vk::getPhysicalDeviceProperties(vki, physicalDevice).limits.maxTexelBufferElements;
6659 	m_targetWidth			= context.getTargetWidth();
6660 	m_targetHeight			= context.getTargetHeight();
6661 
6662 	{
6663 		const vk::VkDescriptorSetLayoutBinding binding =
6664 		{
6665 			0u,
6666 			vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
6667 			1,
6668 			vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6669 			DE_NULL
6670 		};
6671 
6672 		bindings.push_back(binding);
6673 	}
6674 	const vk::VkPushConstantRange pushConstantRange =
6675 	{
6676 		vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6677 		0u,
6678 		16u
6679 	};
6680 
6681 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
6682 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 1u, &pushConstantRange, m_resources);
6683 
6684 	{
6685 		const deUint32							descriptorCount	= (deUint32)(divRoundUp(m_bufferSize, (vk::VkDeviceSize)m_maxStorageTexelCount * 4));
6686 		const vk::VkDescriptorPoolSize			poolSizes		=
6687 		{
6688 			vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
6689 			descriptorCount
6690 		};
6691 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
6692 		{
6693 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
6694 			DE_NULL,
6695 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
6696 
6697 			descriptorCount,
6698 			1u,
6699 			&poolSizes,
6700 		};
6701 
6702 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
6703 		m_descriptorSets.resize(descriptorCount, (vk::VkDescriptorSet)0);
6704 		m_bufferViews.resize(descriptorCount, (vk::VkBufferView)0);
6705 	}
6706 
6707 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6708 	{
6709 		const deUint32							count			= (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxStorageTexelCount * 4
6710 																? m_bufferSize - descriptorSetNdx * m_maxStorageTexelCount * 4
6711 																: m_maxStorageTexelCount * 4) / 4;
6712 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
6713 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
6714 		{
6715 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
6716 			DE_NULL,
6717 
6718 			*m_descriptorPool,
6719 			1,
6720 			&layout
6721 		};
6722 
6723 		m_descriptorSets[descriptorSetNdx] = vk::allocateDescriptorSet(vkd, device, &allocateInfo).disown();
6724 
6725 		{
6726 			const vk::VkBufferViewCreateInfo createInfo =
6727 			{
6728 				vk::VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
6729 				DE_NULL,
6730 				0u,
6731 
6732 				context.getBuffer(),
6733 				vk::VK_FORMAT_R32_UINT,
6734 				descriptorSetNdx * m_maxStorageTexelCount * 4,
6735 				count * 4
6736 			};
6737 
6738 			VK_CHECK(vkd.createBufferView(device, &createInfo, DE_NULL, &m_bufferViews[descriptorSetNdx]));
6739 		}
6740 
6741 		{
6742 			const vk::VkWriteDescriptorSet			write		=
6743 			{
6744 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
6745 				DE_NULL,
6746 				m_descriptorSets[descriptorSetNdx],
6747 				0u,
6748 				0u,
6749 				1u,
6750 				vk::VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
6751 				DE_NULL,
6752 				DE_NULL,
6753 				&m_bufferViews[descriptorSetNdx]
6754 			};
6755 
6756 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
6757 		}
6758 	}
6759 }
6760 
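// In addition to callId and valuesPerPixel, the push constants pass the texel count (width) of
// the currently bound view, since the last view may be shorter than m_maxStorageTexelCount.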
6761 void RenderFragmentStorageTexelBuffer::submit (SubmitContext& context)
6762 {
6763 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
6764 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
6765 
6766 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
6767 
6768 	for (size_t descriptorSetNdx = 0; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6769 	{
6770 		const struct
6771 		{
6772 			const deUint32	callId;
6773 			const deUint32	valuesPerPixel;
6774 			const deUint32	maxStorageTexelCount;
6775 			const deUint32	width;
6776 		} callParams =
6777 		{
6778 			(deUint32)descriptorSetNdx,
6779 			(deUint32)divRoundUp<size_t>(m_descriptorSets.size() * de::min<size_t>(m_maxStorageTexelCount, (size_t)m_bufferSize / 4), m_targetWidth * m_targetHeight),
6780 			m_maxStorageTexelCount,
6781 			(deUint32)(m_bufferSize < (descriptorSetNdx + 1u) * m_maxStorageTexelCount * 4u
6782 								? m_bufferSize - descriptorSetNdx * m_maxStorageTexelCount * 4u
6783 								: m_maxStorageTexelCount * 4u) / 4u
6784 		};
6785 
6786 		vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &m_descriptorSets[descriptorSetNdx], 0u, DE_NULL);
6787 		vkd.cmdPushConstants(commandBuffer, *m_resources.pipelineLayout, vk::VK_SHADER_STAGE_FRAGMENT_BIT, 0u, (deUint32)sizeof(callParams), &callParams);
6788 		vkd.cmdDraw(commandBuffer, 6, 1, 0, 0);
6789 	}
6790 }
6791 
6792 void RenderFragmentStorageTexelBuffer::verify (VerifyRenderPassContext& context, size_t)
6793 {
6794 	const deUint32	valuesPerPixel	= (deUint32)divRoundUp<size_t>(m_descriptorSets.size() * de::min<size_t>(m_maxStorageTexelCount, (size_t)m_bufferSize / 4), m_targetWidth * m_targetHeight);
6795 
6796 	for (int y = 0; y < context.getReferenceTarget().getSize().y(); y++)
6797 	for (int x = 0; x < context.getReferenceTarget().getSize().x(); x++)
6798 	{
6799 		const size_t firstDescriptorSetNdx = de::min<size_t>((y * 256u + x) / (m_maxStorageTexelCount / valuesPerPixel), m_descriptorSets.size() - 1);
6800 
6801 		for (size_t descriptorSetNdx = firstDescriptorSetNdx; descriptorSetNdx < m_descriptorSets.size(); descriptorSetNdx++)
6802 		{
6803 			const size_t	offset	= descriptorSetNdx * m_maxStorageTexelCount * 4;
6804 			const deUint32	callId	= (deUint32)descriptorSetNdx;
6805 
6806 			const deUint32	id		= (deUint32)y * 256u + (deUint32)x;
6807 			const deUint32	count	= (deUint32)(m_bufferSize < (descriptorSetNdx + 1) * m_maxStorageTexelCount * 4
6808 									? m_bufferSize - descriptorSetNdx * m_maxStorageTexelCount * 4
6809 									: m_maxStorageTexelCount * 4) / 4;
6810 
6811 			if (y * 256u + x < callId * (m_maxStorageTexelCount / valuesPerPixel))
6812 				continue;
6813 			else
6814 			{
6815 				deUint32 value = id;
6816 
6817 				for (deUint32 i = 0; i < valuesPerPixel; i++)
6818 				{
6819 					value	= ((deUint32)context.getReference().get(offset + (value % count) * 4 + 0))
6820 							| (((deUint32)context.getReference().get(offset + (value % count) * 4 + 1)) << 8u)
6821 							| (((deUint32)context.getReference().get(offset + (value % count) * 4 + 2)) << 16u)
6822 							| (((deUint32)context.getReference().get(offset + (value % count) * 4 + 3)) << 24u);
6823 
6824 				}
6825 				const UVec4	vec	((value >>  0u) & 0xFFu,
6826 								 (value >>  8u) & 0xFFu,
6827 								 (value >> 16u) & 0xFFu,
6828 								 (value >> 24u) & 0xFFu);
6829 
6830 				context.getReferenceTarget().getAccess().setPixel(vec.asFloat() / Vec4(255.0f), x, y);
6831 			}
6832 		}
6833 	}
6834 }
6835 
6836 class RenderFragmentStorageImage : public RenderPassCommand
6837 {
6838 public:
6839 									RenderFragmentStorageImage	(void) {}
6840 									~RenderFragmentStorageImage	(void);
6841 
6842 	const char*						getName						(void) const { return "RenderFragmentStorageImage"; }
6843 	void							logPrepare					(TestLog&, size_t) const;
6844 	void							logSubmit					(TestLog&, size_t) const;
6845 	void							prepare						(PrepareRenderPassContext&);
6846 	void							submit						(SubmitContext& context);
6847 	void							verify						(VerifyRenderPassContext&, size_t);
6848 
6849 private:
6850 	PipelineResources				m_resources;
6851 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
6852 	vk::Move<vk::VkDescriptorSet>	m_descriptorSet;
6853 	vk::Move<vk::VkImageView>		m_imageView;
6854 };
6855 
6856 RenderFragmentStorageImage::~RenderFragmentStorageImage (void)
6857 {
6858 }
6859 
6860 void RenderFragmentStorageImage::logPrepare (TestLog& log, size_t commandIndex) const
6861 {
6862 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render storage image." << TestLog::EndMessage;
6863 }
6864 
6865 void RenderFragmentStorageImage::logSubmit (TestLog& log, size_t commandIndex) const
6866 {
6867 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using storage image." << TestLog::EndMessage;
6868 }
6869 
6870 void RenderFragmentStorageImage::prepare (PrepareRenderPassContext& context)
6871 {
6872 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
6873 	const vk::VkDevice							device					= context.getContext().getDevice();
6874 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
6875 	const deUint32								subpass					= 0;
6876 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-quad.vert"), 0));
6877 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("storage-image.frag"), 0));
6878 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
6879 
6880 	{
6881 		const vk::VkDescriptorSetLayoutBinding binding =
6882 		{
6883 			0u,
6884 			vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
6885 			1,
6886 			vk::VK_SHADER_STAGE_FRAGMENT_BIT,
6887 			DE_NULL
6888 		};
6889 
6890 		bindings.push_back(binding);
6891 	}
6892 
6893 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
6894 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 0u, DE_NULL, m_resources);
6895 
6896 	{
6897 		const vk::VkDescriptorPoolSize			poolSizes		=
6898 		{
6899 			vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
6900 			1
6901 		};
6902 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
6903 		{
6904 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
6905 			DE_NULL,
6906 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
6907 
6908 			1u,
6909 			1u,
6910 			&poolSizes,
6911 		};
6912 
6913 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
6914 	}
6915 
6916 	{
6917 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
6918 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
6919 		{
6920 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
6921 			DE_NULL,
6922 
6923 			*m_descriptorPool,
6924 			1,
6925 			&layout
6926 		};
6927 
6928 		m_descriptorSet = vk::allocateDescriptorSet(vkd, device, &allocateInfo);
6929 
6930 		{
6931 			const vk::VkImageViewCreateInfo createInfo =
6932 			{
6933 				vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
6934 				DE_NULL,
6935 				0u,
6936 
6937 				context.getImage(),
6938 				vk::VK_IMAGE_VIEW_TYPE_2D,
6939 				vk::VK_FORMAT_R8G8B8A8_UNORM,
6940 				vk::makeComponentMappingRGBA(),
6941 				{
6942 					vk::VK_IMAGE_ASPECT_COLOR_BIT,
6943 					0u,
6944 					1u,
6945 					0u,
6946 					1u
6947 				}
6948 			};
6949 
6950 			m_imageView = vk::createImageView(vkd, device, &createInfo);
6951 		}
6952 
6953 		{
6954 			const vk::VkDescriptorImageInfo			imageInfo	=
6955 			{
6956 				0,
6957 				*m_imageView,
6958 				context.getImageLayout()
6959 			};
6960 			const vk::VkWriteDescriptorSet			write		=
6961 			{
6962 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
6963 				DE_NULL,
6964 				*m_descriptorSet,
6965 				0u,
6966 				0u,
6967 				1u,
6968 				vk::VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
6969 				&imageInfo,
6970 				DE_NULL,
6971 				DE_NULL,
6972 			};
6973 
6974 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
6975 		}
6976 	}
6977 }
6978 
6979 void RenderFragmentStorageImage::submit (SubmitContext& context)
6980 {
6981 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
6982 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
6983 
6984 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
6985 
6986 	vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &(*m_descriptorSet), 0u, DE_NULL);
6987 	vkd.cmdDraw(commandBuffer, 6, 1, 0, 0);
6988 }
6989 
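// Reference for the image-based fragment shaders: starting from the pixel coordinate, repeatedly
// derive a new position from the previous value (using an xor-based scramble) and read the
// reference image there; the last value read becomes the expected output color.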
6990 void RenderFragmentStorageImage::verify (VerifyRenderPassContext& context, size_t)
6991 {
6992 	const UVec2		size			= UVec2(context.getReferenceImage().getWidth(), context.getReferenceImage().getHeight());
6993 	const deUint32	valuesPerPixel	= de::max<deUint32>(1u, (size.x() * size.y()) / (256u * 256u));
6994 
6995 	for (int y = 0; y < context.getReferenceTarget().getSize().y(); y++)
6996 	for (int x = 0; x < context.getReferenceTarget().getSize().x(); x++)
6997 	{
6998 		UVec4	value	= UVec4(x, y, 0u, 0u);
6999 
7000 		for (deUint32 i = 0; i < valuesPerPixel; i++)
7001 		{
7002 			const UVec2	pos			= UVec2(value.z() * 256u + (value.x() ^ value.z()), value.w() * 256u + (value.y() ^ value.w()));
7003 			const Vec4	floatValue	= context.getReferenceImage().getAccess().getPixel(pos.x() % size.x(), pos.y() % size.y());
7004 
7005 			value = UVec4((deUint32)(floatValue.x() * 255.0f),
7006 						  (deUint32)(floatValue.y() * 255.0f),
7007 						  (deUint32)(floatValue.z() * 255.0f),
7008 						  (deUint32)(floatValue.w() * 255.0f));
7009 
7010 		}
7011 		context.getReferenceTarget().getAccess().setPixel(value.asFloat() / Vec4(255.0f), x, y);
7012 	}
7013 }
7014 
7015 class RenderFragmentSampledImage : public RenderPassCommand
7016 {
7017 public:
7018 				RenderFragmentSampledImage	(void) {}
7019 				~RenderFragmentSampledImage	(void);
7020 
7021 	const char*	getName						(void) const { return "RenderFragmentSampledImage"; }
7022 	void		logPrepare					(TestLog&, size_t) const;
7023 	void		logSubmit					(TestLog&, size_t) const;
7024 	void		prepare						(PrepareRenderPassContext&);
7025 	void		submit						(SubmitContext& context);
7026 	void		verify						(VerifyRenderPassContext&, size_t);
7027 
7028 private:
7029 	PipelineResources				m_resources;
7030 	vk::Move<vk::VkDescriptorPool>	m_descriptorPool;
7031 	vk::Move<vk::VkDescriptorSet>	m_descriptorSet;
7032 	vk::Move<vk::VkImageView>		m_imageView;
7033 	vk::Move<vk::VkSampler>			m_sampler;
7034 };
7035 
7036 RenderFragmentSampledImage::~RenderFragmentSampledImage (void)
7037 {
7038 }
7039 
7040 void RenderFragmentSampledImage::logPrepare (TestLog& log, size_t commandIndex) const
7041 {
7042 	log << TestLog::Message << commandIndex << ":" << getName() << " Create pipeline for render sampled image." << TestLog::EndMessage;
7043 }
7044 
7045 void RenderFragmentSampledImage::logSubmit (TestLog& log, size_t commandIndex) const
7046 {
7047 	log << TestLog::Message << commandIndex << ":" << getName() << " Render using sampled image." << TestLog::EndMessage;
7048 }
7049 
7050 void RenderFragmentSampledImage::prepare (PrepareRenderPassContext& context)
7051 {
7052 	const vk::DeviceInterface&					vkd						= context.getContext().getDeviceInterface();
7053 	const vk::VkDevice							device					= context.getContext().getDevice();
7054 	const vk::VkRenderPass						renderPass				= context.getRenderPass();
7055 	const deUint32								subpass					= 0;
7056 	const vk::Unique<vk::VkShaderModule>		vertexShaderModule		(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("render-quad.vert"), 0));
7057 	const vk::Unique<vk::VkShaderModule>		fragmentShaderModule	(vk::createShaderModule(vkd, device, context.getBinaryCollection().get("sampled-image.frag"), 0));
7058 	vector<vk::VkDescriptorSetLayoutBinding>	bindings;
7059 
7060 	{
7061 		const vk::VkDescriptorSetLayoutBinding binding =
7062 		{
7063 			0u,
7064 			vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
7065 			1,
7066 			vk::VK_SHADER_STAGE_FRAGMENT_BIT,
7067 			DE_NULL
7068 		};
7069 
7070 		bindings.push_back(binding);
7071 	}
7072 
7073 	createPipelineWithResources(vkd, device, renderPass, subpass, *vertexShaderModule, *fragmentShaderModule, context.getTargetWidth(), context.getTargetHeight(),
7074 								vector<vk::VkVertexInputBindingDescription>(), vector<vk::VkVertexInputAttributeDescription>(), bindings, vk::VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST, 0u, DE_NULL, m_resources);
7075 
7076 	{
7077 		const vk::VkDescriptorPoolSize			poolSizes		=
7078 		{
7079 			vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
7080 			1
7081 		};
7082 		const vk::VkDescriptorPoolCreateInfo	createInfo		=
7083 		{
7084 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
7085 			DE_NULL,
7086 			vk::VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
7087 
7088 			1u,
7089 			1u,
7090 			&poolSizes,
7091 		};
7092 
7093 		m_descriptorPool = vk::createDescriptorPool(vkd, device, &createInfo);
7094 	}
7095 
7096 	{
7097 		const vk::VkDescriptorSetLayout			layout			= *m_resources.descriptorSetLayout;
7098 		const vk::VkDescriptorSetAllocateInfo	allocateInfo	=
7099 		{
7100 			vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
7101 			DE_NULL,
7102 
7103 			*m_descriptorPool,
7104 			1,
7105 			&layout
7106 		};
7107 
7108 		m_descriptorSet = vk::allocateDescriptorSet(vkd, device, &allocateInfo);
7109 
7110 		{
7111 			const vk::VkImageViewCreateInfo createInfo =
7112 			{
7113 				vk::VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
7114 				DE_NULL,
7115 				0u,
7116 
7117 				context.getImage(),
7118 				vk::VK_IMAGE_VIEW_TYPE_2D,
7119 				vk::VK_FORMAT_R8G8B8A8_UNORM,
7120 				vk::makeComponentMappingRGBA(),
7121 				{
7122 					vk::VK_IMAGE_ASPECT_COLOR_BIT,
7123 					0u,
7124 					1u,
7125 					0u,
7126 					1u
7127 				}
7128 			};
7129 
7130 			m_imageView = vk::createImageView(vkd, device, &createInfo);
7131 		}
7132 
7133 		{
7134 			const vk::VkSamplerCreateInfo createInfo =
7135 			{
7136 				vk::VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
7137 				DE_NULL,
7138 				0u,
7139 
7140 				vk::VK_FILTER_NEAREST,
7141 				vk::VK_FILTER_NEAREST,
7142 
7143 				vk::VK_SAMPLER_MIPMAP_MODE_LINEAR,
7144 				vk::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
7145 				vk::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
7146 				vk::VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
7147 				0.0f,
7148 				VK_FALSE,
7149 				1.0f,
7150 				VK_FALSE,
7151 				vk::VK_COMPARE_OP_ALWAYS,
7152 				0.0f,
7153 				0.0f,
7154 				vk::VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
7155 				VK_FALSE
7156 			};
7157 
7158 			m_sampler = vk::createSampler(vkd, device, &createInfo);
7159 		}
7160 
7161 		{
7162 			const vk::VkDescriptorImageInfo			imageInfo	=
7163 			{
7164 				*m_sampler,
7165 				*m_imageView,
7166 				context.getImageLayout()
7167 			};
7168 			const vk::VkWriteDescriptorSet			write		=
7169 			{
7170 				vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
7171 				DE_NULL,
7172 				*m_descriptorSet,
7173 				0u,
7174 				0u,
7175 				1u,
7176 				vk::VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
7177 				&imageInfo,
7178 				DE_NULL,
7179 				DE_NULL,
7180 			};
7181 
7182 			vkd.updateDescriptorSets(device, 1u, &write, 0u, DE_NULL);
7183 		}
7184 	}
7185 }
7186 
7187 void RenderFragmentSampledImage::submit (SubmitContext& context)
7188 {
7189 	const vk::DeviceInterface&	vkd				= context.getContext().getDeviceInterface();
7190 	const vk::VkCommandBuffer	commandBuffer	= context.getCommandBuffer();
7191 
7192 	vkd.cmdBindPipeline(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipeline);
7193 
7194 	vkd.cmdBindDescriptorSets(commandBuffer, vk::VK_PIPELINE_BIND_POINT_GRAPHICS, *m_resources.pipelineLayout, 0u, 1u, &(*m_descriptorSet), 0u, DE_NULL);
7195 	vkd.cmdDraw(commandBuffer, 6u, 1u, 0u, 0u);
7196 }
7197 
7198 void RenderFragmentSampledImage::verify (VerifyRenderPassContext& context, size_t)
7199 {
7200 	const UVec2		size			= UVec2(context.getReferenceImage().getWidth(), context.getReferenceImage().getHeight());
7201 	const deUint32	valuesPerPixel	= de::max<deUint32>(1u, (size.x() * size.y()) / (256u * 256u));
7202 
7203 	for (int y = 0; y < context.getReferenceTarget().getSize().y(); y++)
7204 	for (int x = 0; x < context.getReferenceTarget().getSize().x(); x++)
7205 	{
7206 		UVec4	value	= UVec4(x, y, 0u, 0u);
7207 
7208 		for (deUint32 i = 0; i < valuesPerPixel; i++)
7209 		{
7210 			const UVec2	pos			= UVec2(value.z() * 256u + (value.x() ^ value.z()), value.w() * 256u + (value.y() ^ value.w()));
7211 			const Vec4	floatValue	= context.getReferenceImage().getAccess().getPixel(pos.x() % size.x(), pos.y() % size.y());
7212 
7213 			value = UVec4((deUint32)(floatValue.x() * 255.0f),
7214 						  (deUint32)(floatValue.y() * 255.0f),
7215 						  (deUint32)(floatValue.z() * 255.0f),
7216 						  (deUint32)(floatValue.w() * 255.0f));
7217 
7218 		}
7219 
7220 		context.getReferenceTarget().getAccess().setPixel(value.asFloat() / Vec4(255.0f), x, y);
7221 	}
7222 }
7223 
7224 enum Op
7225 {
7226 	OP_MAP,
7227 	OP_UNMAP,
7228 
7229 	OP_MAP_FLUSH,
7230 	OP_MAP_INVALIDATE,
7231 
7232 	OP_MAP_READ,
7233 	OP_MAP_WRITE,
7234 	OP_MAP_MODIFY,
7235 
7236 	OP_BUFFER_CREATE,
7237 	OP_BUFFER_DESTROY,
7238 	OP_BUFFER_BINDMEMORY,
7239 
7240 	OP_QUEUE_WAIT_FOR_IDLE,
7241 	OP_DEVICE_WAIT_FOR_IDLE,
7242 
7243 	OP_COMMAND_BUFFER_BEGIN,
7244 	OP_COMMAND_BUFFER_END,
7245 
7246 	// Secondary, non-render-pass command buffers
7247 	// Render pass secondary command buffers are not currently covered
7248 	OP_SECONDARY_COMMAND_BUFFER_BEGIN,
7249 	OP_SECONDARY_COMMAND_BUFFER_END,
7250 
7251 	// Buffer transfer operations
7252 	OP_BUFFER_FILL,
7253 	OP_BUFFER_UPDATE,
7254 
7255 	OP_BUFFER_COPY_TO_BUFFER,
7256 	OP_BUFFER_COPY_FROM_BUFFER,
7257 
7258 	OP_BUFFER_COPY_TO_IMAGE,
7259 	OP_BUFFER_COPY_FROM_IMAGE,
7260 
7261 	OP_IMAGE_CREATE,
7262 	OP_IMAGE_DESTROY,
7263 	OP_IMAGE_BINDMEMORY,
7264 
7265 	OP_IMAGE_TRANSITION_LAYOUT,
7266 
7267 	OP_IMAGE_COPY_TO_BUFFER,
7268 	OP_IMAGE_COPY_FROM_BUFFER,
7269 
7270 	OP_IMAGE_COPY_TO_IMAGE,
7271 	OP_IMAGE_COPY_FROM_IMAGE,
7272 
7273 	OP_IMAGE_BLIT_TO_IMAGE,
7274 	OP_IMAGE_BLIT_FROM_IMAGE,
7275 
7276 	OP_IMAGE_RESOLVE,
7277 
7278 	OP_PIPELINE_BARRIER_GLOBAL,
7279 	OP_PIPELINE_BARRIER_BUFFER,
7280 	OP_PIPELINE_BARRIER_IMAGE,
7281 
7282 	// Renderpass operations
7283 	OP_RENDERPASS_BEGIN,
7284 	OP_RENDERPASS_END,
7285 
7286 	// Commands inside render pass
7287 	OP_RENDER_VERTEX_BUFFER,
7288 	OP_RENDER_INDEX_BUFFER,
7289 
7290 	OP_RENDER_VERTEX_UNIFORM_BUFFER,
7291 	OP_RENDER_FRAGMENT_UNIFORM_BUFFER,
7292 
7293 	OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER,
7294 	OP_RENDER_FRAGMENT_UNIFORM_TEXEL_BUFFER,
7295 
7296 	OP_RENDER_VERTEX_STORAGE_BUFFER,
7297 	OP_RENDER_FRAGMENT_STORAGE_BUFFER,
7298 
7299 	OP_RENDER_VERTEX_STORAGE_TEXEL_BUFFER,
7300 	OP_RENDER_FRAGMENT_STORAGE_TEXEL_BUFFER,
7301 
7302 	OP_RENDER_VERTEX_STORAGE_IMAGE,
7303 	OP_RENDER_FRAGMENT_STORAGE_IMAGE,
7304 
7305 	OP_RENDER_VERTEX_SAMPLED_IMAGE,
7306 	OP_RENDER_FRAGMENT_SAMPLED_IMAGE,
7307 };
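
// Illustrative note (a sketch, not a sequence taken from the generator): for a host-visible,
// non-coherent allocation created with USAGE_HOST_WRITE | USAGE_TRANSFER_SRC, one sequence the
// random walk below could legally produce is:
//
//   OP_BUFFER_CREATE -> OP_BUFFER_BINDMEMORY -> OP_MAP -> OP_MAP_WRITE -> OP_MAP_FLUSH
//   -> OP_UNMAP -> OP_COMMAND_BUFFER_BEGIN -> OP_BUFFER_COPY_TO_BUFFER
//   -> OP_COMMAND_BUFFER_END -> OP_QUEUE_WAIT_FOR_IDLE
//
// getAvailableOps() below decides which ops are legal in a given State, and applyOp()
// advances the State accordingly.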
7308 
7309 enum Stage
7310 {
7311 	STAGE_HOST,
7312 	STAGE_COMMAND_BUFFER,
7313 	STAGE_SECONDARY_COMMAND_BUFFER,
7314 
7315 	STAGE_RENDER_PASS
7316 };
7317 
7318 vk::VkAccessFlags getWriteAccessFlags (void)
7319 {
7320 	return vk::VK_ACCESS_SHADER_WRITE_BIT
7321 		| vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
7322 		| vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT
7323 		| vk::VK_ACCESS_TRANSFER_WRITE_BIT
7324 		| vk::VK_ACCESS_HOST_WRITE_BIT
7325 		| vk::VK_ACCESS_MEMORY_WRITE_BIT;
7326 }
7327 
7328 bool isWriteAccess (vk::VkAccessFlagBits access)
7329 {
7330 	return (getWriteAccessFlags() & access) != 0;
7331 }
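
// For example, isWriteAccess(vk::VK_ACCESS_TRANSFER_WRITE_BIT) is true while
// isWriteAccess(vk::VK_ACCESS_TRANSFER_READ_BIT) is false; in this model only writes need to
// be made available, reads only need earlier writes to be visible.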
7332 
7333 class CacheState
7334 {
7335 public:
7336 									CacheState				(vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses);
7337 
7338 	bool							isValid					(vk::VkPipelineStageFlagBits	stage,
7339 															 vk::VkAccessFlagBits			access) const;
7340 
7341 	void							perform					(vk::VkPipelineStageFlagBits	stage,
7342 															 vk::VkAccessFlagBits			access);
7343 
7344 	void							submitCommandBuffer		(void);
7345 	void							waitForIdle				(void);
7346 
7347 	void							getFullBarrier			(vk::VkPipelineStageFlags&	srcStages,
7348 															 vk::VkAccessFlags&			srcAccesses,
7349 															 vk::VkPipelineStageFlags&	dstStages,
7350 															 vk::VkAccessFlags&			dstAccesses) const;
7351 
7352 	void							barrier					(vk::VkPipelineStageFlags	srcStages,
7353 															 vk::VkAccessFlags			srcAccesses,
7354 															 vk::VkPipelineStageFlags	dstStages,
7355 															 vk::VkAccessFlags			dstAccesses);
7356 
7357 	void							imageLayoutBarrier		(vk::VkPipelineStageFlags	srcStages,
7358 															 vk::VkAccessFlags			srcAccesses,
7359 															 vk::VkPipelineStageFlags	dstStages,
7360 															 vk::VkAccessFlags			dstAccesses);
7361 
7362 	void							checkImageLayoutBarrier	(vk::VkPipelineStageFlags	srcStages,
7363 															 vk::VkAccessFlags			srcAccesses,
7364 															 vk::VkPipelineStageFlags	dstStages,
7365 															 vk::VkAccessFlags			dstAccesses);
7366 
7367 	// Everything is clean and there is no need for barriers
7368 	bool							isClean					(void) const;
7369 
7370 	vk::VkPipelineStageFlags		getAllowedStages		(void) const { return m_allowedStages; }
7371 	vk::VkAccessFlags				getAllowedAcceses		(void) const { return m_allowedAccesses; }
7372 private:
7373 	// Limit which stages and accesses are used by the CacheState tracker
7374 	const vk::VkPipelineStageFlags	m_allowedStages;
7375 	const vk::VkAccessFlags			m_allowedAccesses;
7376 
7377 	// [dstStage][srcStage][dstAccess] = srcAccesses
7378 	// In stage dstStage, writes of srcAccesses performed in srcStage are not yet available to dstAccess
7379 	vk::VkAccessFlags				m_unavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST][ACCESS_LAST];
7380 	// The latest image layout transition is not yet available in the stage
7381 	bool							m_unavailableLayoutTransition[PIPELINESTAGE_LAST];
7382 	// [dstStage] = dstAccesses
7383 	// In stage dstStage ops with dstAccesses are not yet visible
7384 	vk::VkAccessFlags				m_invisibleOperations[PIPELINESTAGE_LAST];
7385 
7386 	// [dstStage] = srcStage
7387 	// Memory operations in srcStage have not completed before dstStage
7388 	vk::VkPipelineStageFlags		m_incompleteOperations[PIPELINESTAGE_LAST];
7389 };
7390 
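// CacheState is a simplified model of the Vulkan availability/visibility rules that the
// random command generator below uses to decide which commands and barriers are legal.
// The intended call pattern is roughly the following sketch (in the tests the allowed stage
// and access masks come from usageToStageFlags()/usageToAccessFlags()):
//
//   CacheState cache (usageToStageFlags(usage), usageToAccessFlags(usage));
//
//   // A command performs an access; write accesses dirty the tracked state.
//   cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
//
//   // isValid() gates further commands: a read is only considered legal once earlier
//   // writes are both available and visible to the given stage and access.
//   if (!cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
//   {
//       vk::VkPipelineStageFlags srcStages, dstStages;
//       vk::VkAccessFlags        srcAccesses, dstAccesses;
//
//       // getFullBarrier() reports a barrier that would clean everything; barrier()
//       // mirrors the effect of recording the corresponding vkCmdPipelineBarrier().
//       cache.getFullBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
//       cache.barrier(srcStages, srcAccesses, dstStages, dstAccesses);
//   }
//
// isClean() is used to decide whether emitting yet another barrier would be redundant.
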
7391 CacheState::CacheState (vk::VkPipelineStageFlags allowedStages, vk::VkAccessFlags allowedAccesses)
7392 	: m_allowedStages	(allowedStages)
7393 	, m_allowedAccesses	(allowedAccesses)
7394 {
7395 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
7396 	{
7397 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
7398 
7399 		if ((dstStage_ & m_allowedStages) == 0)
7400 			continue;
7401 
7402 		// All operations are initially visible
7403 		m_invisibleOperations[dstStage] = 0;
7404 
7405 		// There are no incomplete operations initially
7406 		m_incompleteOperations[dstStage] = 0;
7407 
7408 		// There are no incomplete layout transitions
7409 		m_unavailableLayoutTransition[dstStage] = false;
7410 
7411 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
7412 		{
7413 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
7414 
7415 			if ((srcStage_ & m_allowedStages) == 0)
7416 				continue;
7417 
7418 			// There are no write operations that are not yet available
7419 			// initially.
7420 			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
7421 			{
7422 				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
7423 
7424 				if ((dstAccess_ & m_allowedAccesses) == 0)
7425 					continue;
7426 
7427 				m_unavailableWriteOperations[dstStage][srcStage][dstAccess] = 0;
7428 			}
7429 		}
7430 	}
7431 }
7432 
7433 bool CacheState::isValid (vk::VkPipelineStageFlagBits	stage,
7434 						  vk::VkAccessFlagBits			access) const
7435 {
7436 	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
7437 	DE_ASSERT((stage & (~m_allowedStages)) == 0);
7438 
7439 	const PipelineStage	dstStage	= pipelineStageFlagToPipelineStage(stage);
7440 
7441 	// Previous operations are not yet visible to this access in this stage
7442 	if (m_unavailableLayoutTransition[dstStage] || (m_invisibleOperations[dstStage] & access) != 0)
7443 		return false;
7444 
7445 	if (isWriteAccess(access))
7446 	{
7447 		// Memory operations from other stages have not completed before
7448 		// dstStage
7449 		if (m_incompleteOperations[dstStage] != 0)
7450 			return false;
7451 	}
7452 
7453 	return true;
7454 }
7455 
7456 void CacheState::perform (vk::VkPipelineStageFlagBits	stage,
7457 						  vk::VkAccessFlagBits			access)
7458 {
7459 	DE_ASSERT((access & (~m_allowedAccesses)) == 0);
7460 	DE_ASSERT((stage & (~m_allowedStages)) == 0);
7461 
7462 	const PipelineStage srcStage = pipelineStageFlagToPipelineStage(stage);
7463 
7464 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
7465 	{
7466 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
7467 
7468 		if ((dstStage_ & m_allowedStages) == 0)
7469 			continue;
7470 
7471 		// Mark stage as incomplete for all stages
7472 		m_incompleteOperations[dstStage] |= stage;
7473 
7474 		if (isWriteAccess(access))
7475 		{
7476 			// Mark all accesses from all stages invisible
7477 			m_invisibleOperations[dstStage] |= m_allowedAccesses;
7478 
7479 			// Mark write access from srcStage unavailable to all stages for all accesses
7480 			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
7481 			{
7482 				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
7483 
7484 				if ((dstAccess_ & m_allowedAccesses) == 0)
7485 					continue;
7486 
7487 				m_unavailableWriteOperations[dstStage][srcStage][dstAccess] |= access;
7488 			}
7489 		}
7490 	}
7491 }
7492 
7493 void CacheState::submitCommandBuffer (void)
7494 {
7495 	// Flush all host writes and reads
7496 	barrier(m_allowedStages & vk::VK_PIPELINE_STAGE_HOST_BIT,
7497 			m_allowedAccesses & (vk::VK_ACCESS_HOST_READ_BIT | vk::VK_ACCESS_HOST_WRITE_BIT),
7498 			m_allowedStages,
7499 			m_allowedAccesses);
7500 }
7501 
7502 void CacheState::waitForIdle (void)
7503 {
7504 	// Make all writes available
7505 	barrier(m_allowedStages,
7506 			m_allowedAccesses & getWriteAccessFlags(),
7507 			m_allowedStages,
7508 			0);
7509 
7510 	// Make all writes visible on device side
7511 	barrier(m_allowedStages,
7512 			0,
7513 			m_allowedStages & (~vk::VK_PIPELINE_STAGE_HOST_BIT),
7514 			m_allowedAccesses);
7515 }
7516 
7517 void CacheState::getFullBarrier (vk::VkPipelineStageFlags&	srcStages,
7518 								 vk::VkAccessFlags&			srcAccesses,
7519 								 vk::VkPipelineStageFlags&	dstStages,
7520 								 vk::VkAccessFlags&			dstAccesses) const
7521 {
7522 	srcStages	= 0;
7523 	srcAccesses	= 0;
7524 	dstStages	= 0;
7525 	dstAccesses	= 0;
7526 
7527 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
7528 	{
7529 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
7530 
7531 		if ((dstStage_ & m_allowedStages) == 0)
7532 			continue;
7533 
7534 		// Make sure all previous operations are complete in all stages
7535 		if (m_incompleteOperations[dstStage])
7536 		{
7537 			dstStages |= dstStage_;
7538 			srcStages |= m_incompleteOperations[dstStage];
7539 		}
7540 
7541 		// Make sure all operations are made visible in dstStage
7542 		if (m_invisibleOperations[dstStage])
7543 		{
7544 			dstStages |= dstStage_;
7545 			dstAccesses |= m_invisibleOperations[dstStage];
7546 		}
7547 
7548 		// Make sure all write operations from all stages are available
7549 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
7550 		{
7551 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
7552 
7553 			if ((srcStage_ & m_allowedStages) == 0)
7554 				continue;
7555 
7556 			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
7557 			{
7558 				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
7559 
7560 				if ((dstAccess_ & m_allowedAccesses) == 0)
7561 					continue;
7562 
7563 				if (m_unavailableWriteOperations[dstStage][srcStage][dstAccess])
7564 				{
7565 					dstStages |= dstStage_;
7566 					srcStages |= dstStage_;
7567 					srcAccesses |= m_unavailableWriteOperations[dstStage][srcStage][dstAccess];
7568 				}
7569 			}
7570 
7571 			if (m_unavailableLayoutTransition[dstStage] && !m_unavailableLayoutTransition[srcStage])
7572 			{
7573 				// Add dependency between srcStage and dstStage if layout transition has not completed in dstStage,
7574 				// but has completed in srcStage.
7575 				dstStages |= dstStage_;
7576 				srcStages |= dstStage_;
7577 			}
7578 		}
7579 	}
7580 
7581 	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
7582 	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
7583 	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
7584 	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
7585 }
7586 
7587 void CacheState::checkImageLayoutBarrier (vk::VkPipelineStageFlags	srcStages,
7588 										  vk::VkAccessFlags			srcAccesses,
7589 										  vk::VkPipelineStageFlags	dstStages,
7590 										  vk::VkAccessFlags			dstAccesses)
7591 {
7592 	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
7593 	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
7594 	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
7595 	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
7596 
7597 	DE_UNREF(srcStages);
7598 	DE_UNREF(srcAccesses);
7599 
7600 	DE_UNREF(dstStages);
7601 	DE_UNREF(dstAccesses);
7602 
7603 #if defined(DE_DEBUG)
7604 	// Check that all stages have completed before srcStages or are in srcStages.
7605 	{
7606 		vk::VkPipelineStageFlags completedStages = srcStages;
7607 
7608 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
7609 		{
7610 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
7611 
7612 			if ((srcStage_ & srcStages) == 0)
7613 				continue;
7614 
7615 			completedStages |= (~m_incompleteOperations[srcStage]);
7616 		}
7617 
7618 		DE_ASSERT((completedStages & m_allowedStages) == m_allowedStages);
7619 	}
7620 
7621 	// Check that any write is available in at least one stage. Since all stages are complete, even a single flush is enough.
7622 	if ((getWriteAccessFlags() & m_allowedAccesses) != 0 && (srcAccesses & getWriteAccessFlags()) == 0)
7623 	{
7624 		bool anyWriteAvailable = false;
7625 
7626 		for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
7627 		{
7628 			const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
7629 
7630 			if ((dstStage_ & m_allowedStages) == 0)
7631 				continue;
7632 
7633 			for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
7634 			{
7635 				const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
7636 
7637 				if ((srcStage_ & m_allowedStages) == 0)
7638 					continue;
7639 
7640 				for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
7641 				{
7642 					const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
7643 
7644 					if ((dstAccess_ & m_allowedAccesses) == 0)
7645 						continue;
7646 
7647 					if (m_unavailableWriteOperations[dstStage][srcStage][dstAccess] != (getWriteAccessFlags() & m_allowedAccesses))
7648 					{
7649 						anyWriteAvailable = true;
7650 						break;
7651 					}
7652 				}
7653 			}
7654 		}
7655 
7656 		DE_ASSERT(anyWriteAvailable);
7657 	}
7658 #endif
7659 }
7660 
7661 void CacheState::imageLayoutBarrier (vk::VkPipelineStageFlags	srcStages,
7662 									 vk::VkAccessFlags			srcAccesses,
7663 									 vk::VkPipelineStageFlags	dstStages,
7664 									 vk::VkAccessFlags			dstAccesses)
7665 {
7666 	checkImageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
7667 
7668 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
7669 	{
7670 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
7671 
7672 		if ((dstStage_ & m_allowedStages) == 0)
7673 			continue;
7674 
7675 		// All stages are incomplete after the barrier, except each dstStage with respect to itself.
7676 		m_incompleteOperations[dstStage] = m_allowedStages & (~dstStage_);
7677 
7678 		// All memory operations are invisible unless they are listed in dstAccesses
7679 		m_invisibleOperations[dstStage] = m_allowedAccesses & (~dstAccesses);
7680 
7681 		// The layout transition is unavailable in a stage unless that stage is included in dstStages
7682 		m_unavailableLayoutTransition[dstStage] = (dstStage_ & dstStages) == 0;
7683 
7684 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
7685 		{
7686 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
7687 
7688 			if ((srcStage_ & m_allowedStages) == 0)
7689 				continue;
7690 
7691 			// All write operations are available after layout transition
7692 			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
7693 			{
7694 				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
7695 
7696 				if ((dstAccess_ & m_allowedAccesses) == 0)
7697 					continue;
7698 
7699 				m_unavailableWriteOperations[dstStage][srcStage][dstAccess] = 0;
7700 			}
7701 		}
7702 	}
7703 }
7704 
7705 void CacheState::barrier (vk::VkPipelineStageFlags	srcStages,
7706 						  vk::VkAccessFlags			srcAccesses,
7707 						  vk::VkPipelineStageFlags	dstStages,
7708 						  vk::VkAccessFlags			dstAccesses)
7709 {
7710 	DE_ASSERT((srcStages & (~m_allowedStages)) == 0);
7711 	DE_ASSERT((srcAccesses & (~m_allowedAccesses)) == 0);
7712 	DE_ASSERT((dstStages & (~m_allowedStages)) == 0);
7713 	DE_ASSERT((dstAccesses & (~m_allowedAccesses)) == 0);
7714 
7715 	// Transitivity
7716 	{
7717 		vk::VkPipelineStageFlags		oldIncompleteOperations[PIPELINESTAGE_LAST];
7718 		vk::VkAccessFlags				oldUnavailableWriteOperations[PIPELINESTAGE_LAST][PIPELINESTAGE_LAST][ACCESS_LAST];
7719 		bool							oldUnavailableLayoutTransition[PIPELINESTAGE_LAST];
7720 
7721 		deMemcpy(oldIncompleteOperations, m_incompleteOperations, sizeof(oldIncompleteOperations));
7722 		deMemcpy(oldUnavailableWriteOperations, m_unavailableWriteOperations, sizeof(oldUnavailableWriteOperations));
7723 		deMemcpy(oldUnavailableLayoutTransition, m_unavailableLayoutTransition, sizeof(oldUnavailableLayoutTransition));
7724 
7725 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= srcStages; srcStage_ <<= 1)
7726 		{
7727 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
7728 
7729 			if ((srcStage_ & srcStages) == 0)
7730 				continue;
7731 
7732 			for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
7733 			{
7734 				const PipelineStage	dstStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
7735 
7736 				if ((dstStage_ & dstStages) == 0)
7737 					continue;
7738 
7739 				// Stages that have completed before srcStage have also completed before dstStage
7740 				m_incompleteOperations[dstStage] &= oldIncompleteOperations[srcStage];
7741 
7742 				// Image layout transitions that are available in srcStage are now also available in dstStage
7743 				m_unavailableLayoutTransition[dstStage] &= oldUnavailableLayoutTransition[srcStage];
7744 
7745 				for (vk::VkPipelineStageFlags sharedStage_ = 1; sharedStage_ <= m_allowedStages; sharedStage_ <<= 1)
7746 				{
7747 					const PipelineStage	sharedStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)sharedStage_);
7748 
7749 					if ((sharedStage_ & m_allowedStages) == 0)
7750 						continue;
7751 
7752 					// Writes that are available in srcStage are also available in dstStage
7753 					for (vk::VkAccessFlags sharedAccess_ = 1; sharedAccess_ <= m_allowedAccesses; sharedAccess_ <<= 1)
7754 					{
7755 						const Access sharedAccess = accessFlagToAccess((vk::VkAccessFlagBits)sharedAccess_);
7756 
7757 						if ((sharedAccess_ & m_allowedAccesses) == 0)
7758 							continue;
7759 
7760 						m_unavailableWriteOperations[dstStage][sharedStage][sharedAccess] &= oldUnavailableWriteOperations[srcStage][sharedStage][sharedAccess];
7761 					}
7762 				}
7763 			}
7764 		}
7765 	}
7766 
7767 	// Barrier
7768 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= dstStages; dstStage_ <<= 1)
7769 	{
7770 		const PipelineStage	dstStage			= pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
7771 		bool				allWritesAvailable	= true;
7772 
7773 		if ((dstStage_ & dstStages) == 0)
7774 			continue;
7775 
7776 		// Operations in srcStages have completed before any stage in dstStages
7777 		m_incompleteOperations[dstStage] &= ~srcStages;
7778 
7779 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
7780 		{
7781 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
7782 
7783 			if ((srcStage_ & m_allowedStages) == 0)
7784 				continue;
7785 
7786 			// Make srcAccesses from srcStage available in dstStage for dstAccess
7787 			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
7788 			{
7789 				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
7790 
7791 				if ((dstAccess_ & m_allowedAccesses) == 0)
7792 					continue;
7793 
7794 				if (((srcStage_ & srcStages) != 0) && ((dstAccess_ & dstAccesses) != 0))
7795 					m_unavailableWriteOperations[dstStage][srcStage][dstAccess] &= ~srcAccesses;
7796 
7797 				if (m_unavailableWriteOperations[dstStage][srcStage][dstAccess] != 0)
7798 					allWritesAvailable = false;
7799 			}
7800 		}
7801 
7802 		// If all writes are available in dstStage make dstAccesses also visible
7803 		if (allWritesAvailable)
7804 			m_invisibleOperations[dstStage] &= ~dstAccesses;
7805 	}
7806 }
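
// Illustrative sketch of how conservative this model is (assuming a tracker limited to the
// transfer and host stages with VK_ACCESS_TRANSFER_WRITE_BIT and VK_ACCESS_HOST_READ_BIT):
//
//   CacheState cache (vk::VK_PIPELINE_STAGE_TRANSFER_BIT | vk::VK_PIPELINE_STAGE_HOST_BIT,
//                     vk::VK_ACCESS_TRANSFER_WRITE_BIT | vk::VK_ACCESS_HOST_READ_BIT);
//   cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
//   cache.barrier(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT,
//                 vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT);
//
// After this barrier isValid(HOST, HOST_READ) is still false: visibility is only granted to a
// destination stage once every tracked write is available there for every tracked access (the
// allWritesAvailable check above), and dstAccesses here did not cover
// VK_ACCESS_TRANSFER_WRITE_BIT. Applying exactly the masks reported by getFullBarrier()
// instead leaves the tracker clean. The randomly generated barriers start from
// getFullBarrier() and then mask bits off (see the OP_PIPELINE_BARRIER_* handling in applyOp()).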
7807 
7808 bool CacheState::isClean (void) const
7809 {
7810 	for (vk::VkPipelineStageFlags dstStage_ = 1; dstStage_ <= m_allowedStages; dstStage_ <<= 1)
7811 	{
7812 		const PipelineStage dstStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)dstStage_);
7813 
7814 		if ((dstStage_ & m_allowedStages) == 0)
7815 			continue;
7816 
7817 		// Some operations are not visible to some stages
7818 		if (m_invisibleOperations[dstStage] != 0)
7819 			return false;
7820 
7821 		// There are operations that have not completed yet
7822 		if (m_incompleteOperations[dstStage] != 0)
7823 			return false;
7824 
7825 		// Layout transition has not completed yet
7826 		if (m_unavailableLayoutTransition[dstStage])
7827 			return false;
7828 
7829 		for (vk::VkPipelineStageFlags srcStage_ = 1; srcStage_ <= m_allowedStages; srcStage_ <<= 1)
7830 		{
7831 			const PipelineStage srcStage = pipelineStageFlagToPipelineStage((vk::VkPipelineStageFlagBits)srcStage_);
7832 
7833 			if ((srcStage_ & m_allowedStages) == 0)
7834 				continue;
7835 
7836 			for (vk::VkAccessFlags dstAccess_ = 1; dstAccess_ <= m_allowedAccesses; dstAccess_ <<= 1)
7837 			{
7838 				const Access dstAccess = accessFlagToAccess((vk::VkAccessFlagBits)dstAccess_);
7839 
7840 				if ((dstAccess_ & m_allowedAccesses) == 0)
7841 					continue;
7842 
7843 				// Some write operations are not available yet
7844 				if (m_unavailableWriteOperations[dstStage][srcStage][dstAccess] != 0)
7845 					return false;
7846 			}
7847 		}
7848 	}
7849 
7850 	return true;
7851 }
7852 
7853 bool layoutSupportedByUsage (Usage usage, vk::VkImageLayout layout)
7854 {
7855 	switch (layout)
7856 	{
7857 		case vk::VK_IMAGE_LAYOUT_GENERAL:
7858 			return true;
7859 
7860 		case vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
7861 			return (usage & USAGE_COLOR_ATTACHMENT) != 0;
7862 
7863 		case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
7864 			return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
7865 
7866 		case vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL:
7867 			return (usage & USAGE_DEPTH_STENCIL_ATTACHMENT) != 0;
7868 
7869 		case vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
7870 			// \todo [2016-03-09 mika] Should include input attachment
7871 			return (usage & USAGE_SAMPLED_IMAGE) != 0;
7872 
7873 		case vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
7874 			return (usage & USAGE_TRANSFER_SRC) != 0;
7875 
7876 		case vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
7877 			return (usage & USAGE_TRANSFER_DST) != 0;
7878 
7879 		case vk::VK_IMAGE_LAYOUT_PREINITIALIZED:
7880 			return true;
7881 
7882 		default:
7883 			DE_FATAL("Unknown layout");
7884 			return false;
7885 	}
7886 }
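
// For example, layoutSupportedByUsage(USAGE_TRANSFER_SRC, vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
// is true, while VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL requires USAGE_COLOR_ATTACHMENT;
// VK_IMAGE_LAYOUT_GENERAL and VK_IMAGE_LAYOUT_PREINITIALIZED are accepted for any usage.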
7887 
7888 size_t getNumberOfSupportedLayouts (Usage usage)
7889 {
7890 	const vk::VkImageLayout layouts[] =
7891 	{
7892 		vk::VK_IMAGE_LAYOUT_GENERAL,
7893 		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
7894 		vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
7895 		vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
7896 		vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
7897 		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
7898 		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
7899 	};
7900 	size_t supportedLayoutCount = 0;
7901 
7902 	for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
7903 	{
7904 		const vk::VkImageLayout layout = layouts[layoutNdx];
7905 
7906 		if (layoutSupportedByUsage(usage, layout))
7907 			supportedLayoutCount++;
7908 	}
7909 
7910 	return supportedLayoutCount;
7911 }
7912 
7913 vk::VkImageLayout getRandomNextLayout (de::Random&			rng,
7914 									   Usage				usage,
7915 									   vk::VkImageLayout	previousLayout)
7916 {
7917 	const vk::VkImageLayout	layouts[] =
7918 	{
7919 		vk::VK_IMAGE_LAYOUT_GENERAL,
7920 		vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
7921 		vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
7922 		vk::VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
7923 		vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
7924 		vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
7925 		vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
7926 	};
7927 	const size_t			supportedLayoutCount = getNumberOfSupportedLayouts(usage);
7928 
7929 	DE_ASSERT(supportedLayoutCount > 0);
7930 
7931 	size_t nextLayoutNdx = ((size_t)rng.getUint64()) % (previousLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED
7932 														? supportedLayoutCount
7933 														: supportedLayoutCount - 1);
7934 
7935 	for (size_t layoutNdx = 0; layoutNdx < DE_LENGTH_OF_ARRAY(layouts); layoutNdx++)
7936 	{
7937 		const vk::VkImageLayout layout = layouts[layoutNdx];
7938 
7939 		if (layoutSupportedByUsage(usage, layout) && layout != previousLayout)
7940 		{
7941 			if (nextLayoutNdx == 0)
7942 				return layout;
7943 			else
7944 				nextLayoutNdx--;
7945 		}
7946 	}
7947 
7948 	DE_FATAL("Unreachable");
7949 	return vk::VK_IMAGE_LAYOUT_UNDEFINED;
7950 }
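
// For example, with usage = (Usage)(USAGE_TRANSFER_SRC | USAGE_TRANSFER_DST) the supported
// layouts are GENERAL, TRANSFER_SRC_OPTIMAL and TRANSFER_DST_OPTIMAL; the function picks
// pseudo-randomly among them and never returns previousLayout itself (when previousLayout is
// VK_IMAGE_LAYOUT_UNDEFINED all supported layouts are candidates).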
7951 
7952 struct State
7953 {
7954 	State (Usage usage, deUint32 seed)
7955 		: stage							(STAGE_HOST)
7956 		, cache							(usageToStageFlags(usage), usageToAccessFlags(usage))
7957 		, rng							(seed)
7958 		, mapped						(false)
7959 		, hostInvalidated				(true)
7960 		, hostFlushed					(true)
7961 		, memoryDefined					(false)
7962 		, hasBuffer						(false)
7963 		, hasBoundBufferMemory			(false)
7964 		, hasImage						(false)
7965 		, hasBoundImageMemory			(false)
7966 		, imageLayout					(vk::VK_IMAGE_LAYOUT_UNDEFINED)
7967 		, imageDefined					(false)
7968 		, queueIdle						(true)
7969 		, deviceIdle					(true)
7970 		, commandBufferIsEmpty			(true)
7971 		, primaryCommandBufferIsEmpty	(true)
7972 		, renderPassIsEmpty				(true)
7973 	{
7974 	}
7975 
7976 	Stage				stage;
7977 	CacheState			cache;
7978 	de::Random			rng;
7979 
7980 	bool				mapped;
7981 	bool				hostInvalidated;
7982 	bool				hostFlushed;
7983 	bool				memoryDefined;
7984 
7985 	bool				hasBuffer;
7986 	bool				hasBoundBufferMemory;
7987 
7988 	bool				hasImage;
7989 	bool				hasBoundImageMemory;
7990 	vk::VkImageLayout	imageLayout;
7991 	bool				imageDefined;
7992 
7993 	bool				queueIdle;
7994 	bool				deviceIdle;
7995 
7996 	bool				commandBufferIsEmpty;
7997 
7998 	// A copy of the commandBufferIsEmpty value, used while a secondary command buffer is being recorded
7999 	bool				primaryCommandBufferIsEmpty;
8000 
8001 	bool				renderPassIsEmpty;
8002 };
8003 
8004 void getAvailableOps (const State& state, bool supportsBuffers, bool supportsImages, Usage usage, vector<Op>& ops)
8005 {
8006 	if (state.stage == STAGE_HOST)
8007 	{
8008 		if (usage & (USAGE_HOST_READ | USAGE_HOST_WRITE))
8009 		{
8010 			// Host memory operations
8011 			if (state.mapped)
8012 			{
8013 				ops.push_back(OP_UNMAP);
8014 
8015 				// Avoid flush and invalidate if they are not needed
8016 				if (!state.hostFlushed)
8017 					ops.push_back(OP_MAP_FLUSH);
8018 
8019 				if (!state.hostInvalidated
8020 					&& state.queueIdle
8021 					&& ((usage & USAGE_HOST_READ) == 0
8022 						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
8023 					&& ((usage & USAGE_HOST_WRITE) == 0
8024 						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)))
8025 				{
8026 					ops.push_back(OP_MAP_INVALIDATE);
8027 				}
8028 
8029 				if (usage & USAGE_HOST_READ
8030 					&& usage & USAGE_HOST_WRITE
8031 					&& state.memoryDefined
8032 					&& state.hostInvalidated
8033 					&& state.queueIdle
8034 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT)
8035 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
8036 				{
8037 					ops.push_back(OP_MAP_MODIFY);
8038 				}
8039 
8040 				if (usage & USAGE_HOST_READ
8041 					&& state.memoryDefined
8042 					&& state.hostInvalidated
8043 					&& state.queueIdle
8044 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_READ_BIT))
8045 				{
8046 					ops.push_back(OP_MAP_READ);
8047 				}
8048 
8049 				if (usage & USAGE_HOST_WRITE
8050 					&& state.hostInvalidated
8051 					&& state.queueIdle
8052 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_HOST_BIT, vk::VK_ACCESS_HOST_WRITE_BIT))
8053 				{
8054 					ops.push_back(OP_MAP_WRITE);
8055 				}
8056 			}
8057 			else
8058 				ops.push_back(OP_MAP);
8059 		}
8060 
8061 		if (state.hasBoundBufferMemory && state.queueIdle)
8062 		{
8063 			// \note Destroy buffers only after they have been bound
8064 			ops.push_back(OP_BUFFER_DESTROY);
8065 		}
8066 		else
8067 		{
8068 			if (state.hasBuffer)
8069 			{
8070 				if (!state.hasBoundBufferMemory)
8071 					ops.push_back(OP_BUFFER_BINDMEMORY);
8072 			}
8073 			else if (!state.hasImage && supportsBuffers)	// Avoid creating buffer if there is already image
8074 				ops.push_back(OP_BUFFER_CREATE);
8075 		}
8076 
8077 		if (state.hasBoundImageMemory && state.queueIdle)
8078 		{
8079 			// \note Destroy images only after they have been bound
8080 			ops.push_back(OP_IMAGE_DESTROY);
8081 		}
8082 		else
8083 		{
8084 			if (state.hasImage)
8085 			{
8086 				if (!state.hasBoundImageMemory)
8087 					ops.push_back(OP_IMAGE_BINDMEMORY);
8088 			}
8089 			else if (!state.hasBuffer && supportsImages)	// Avoid creating image if there is already buffer
8090 				ops.push_back(OP_IMAGE_CREATE);
8091 		}
8092 
8093 		// Host writes must be flushed before GPU commands, and there must be
8094 		// a buffer or an image for the GPU commands
8095 		if (state.hostFlushed
8096 			&& (state.memoryDefined || supportsDeviceBufferWrites(usage) || state.imageDefined || supportsDeviceImageWrites(usage))
8097 			&& (state.hasBoundBufferMemory || state.hasBoundImageMemory) // Avoid command buffers if there is no object to use
8098 			&& (usageToStageFlags(usage) & (~vk::VK_PIPELINE_STAGE_HOST_BIT)) != 0) // Don't start a command buffer if there is no way to use the memory from the GPU
8099 		{
8100 			ops.push_back(OP_COMMAND_BUFFER_BEGIN);
8101 		}
8102 
8103 		if (!state.deviceIdle)
8104 			ops.push_back(OP_DEVICE_WAIT_FOR_IDLE);
8105 
8106 		if (!state.queueIdle)
8107 			ops.push_back(OP_QUEUE_WAIT_FOR_IDLE);
8108 	}
8109 	else if (state.stage == STAGE_COMMAND_BUFFER)
8110 	{
8111 		if (!state.cache.isClean())
8112 		{
8113 			ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);
8114 
8115 			if (state.hasImage && (state.imageLayout != vk::VK_IMAGE_LAYOUT_UNDEFINED))
8116 				ops.push_back(OP_PIPELINE_BARRIER_IMAGE);
8117 
8118 			if (state.hasBuffer)
8119 				ops.push_back(OP_PIPELINE_BARRIER_BUFFER);
8120 		}
8121 
8122 		if (state.hasBoundBufferMemory)
8123 		{
8124 			if (usage & USAGE_TRANSFER_DST
8125 				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
8126 			{
8127 				ops.push_back(OP_BUFFER_FILL);
8128 				ops.push_back(OP_BUFFER_UPDATE);
8129 				ops.push_back(OP_BUFFER_COPY_FROM_BUFFER);
8130 				ops.push_back(OP_BUFFER_COPY_FROM_IMAGE);
8131 			}
8132 
8133 			if (usage & USAGE_TRANSFER_SRC
8134 				&& state.memoryDefined
8135 				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
8136 			{
8137 				ops.push_back(OP_BUFFER_COPY_TO_BUFFER);
8138 				ops.push_back(OP_BUFFER_COPY_TO_IMAGE);
8139 			}
8140 		}
8141 
8142 		if (state.hasBoundImageMemory
8143 			&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED
8144 				|| getNumberOfSupportedLayouts(usage) > 1))
8145 		{
8146 			ops.push_back(OP_IMAGE_TRANSITION_LAYOUT);
8147 
8148 			{
8149 				if (usage & USAGE_TRANSFER_DST
8150 					&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
8151 						|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
8152 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
8153 				{
8154 					ops.push_back(OP_IMAGE_COPY_FROM_BUFFER);
8155 					ops.push_back(OP_IMAGE_COPY_FROM_IMAGE);
8156 					ops.push_back(OP_IMAGE_BLIT_FROM_IMAGE);
8157 				}
8158 
8159 				if (usage & USAGE_TRANSFER_SRC
8160 					&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
8161 						|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
8162 					&& state.imageDefined
8163 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
8164 				{
8165 					ops.push_back(OP_IMAGE_COPY_TO_BUFFER);
8166 					ops.push_back(OP_IMAGE_COPY_TO_IMAGE);
8167 					ops.push_back(OP_IMAGE_BLIT_TO_IMAGE);
8168 				}
8169 			}
8170 		}
8171 
8172 		// \todo [2016-03-09 mika] Add other usages?
8173 		if ((state.memoryDefined
8174 				&& state.hasBoundBufferMemory
8175 				&& (((usage & USAGE_VERTEX_BUFFER)
8176 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
8177 				|| ((usage & USAGE_INDEX_BUFFER)
8178 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
8179 				|| ((usage & USAGE_UNIFORM_BUFFER)
8180 					&& (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT)
8181 						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT)))
8182 				|| ((usage & USAGE_UNIFORM_TEXEL_BUFFER)
8183 					&& (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT)
8184 						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT)))
8185 				|| ((usage & USAGE_STORAGE_BUFFER)
8186 					&& (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT)
8187 						|| state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT)))
8188 				|| ((usage & USAGE_STORAGE_TEXEL_BUFFER)
8189 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))))
8190 			|| (state.imageDefined
8191 				&& state.hasBoundImageMemory
8192 				&& (((usage & USAGE_STORAGE_IMAGE)
8193 						&& state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
8194 						&& (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT)
8195 							|| state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT)))
8196 					|| ((usage & USAGE_SAMPLED_IMAGE)
8197 						&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
8198 							|| state.imageLayout == vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
8199 						&& (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT)
8200 							|| state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))))))
8201 		{
8202 			ops.push_back(OP_RENDERPASS_BEGIN);
8203 		}
8204 
8205 		ops.push_back(OP_SECONDARY_COMMAND_BUFFER_BEGIN);
8206 
8207 		// \note This depends on previous operations and always has to be the
8208 		// last command buffer operation check
8209 		if (ops.empty() || !state.commandBufferIsEmpty)
8210 			ops.push_back(OP_COMMAND_BUFFER_END);
8211 	}
8212 	else if (state.stage == STAGE_SECONDARY_COMMAND_BUFFER)
8213 	{
8214 		if (!state.cache.isClean())
8215 		{
8216 			ops.push_back(OP_PIPELINE_BARRIER_GLOBAL);
8217 
8218 			if (state.hasImage && (state.imageLayout != vk::VK_IMAGE_LAYOUT_UNDEFINED))
8219 				ops.push_back(OP_PIPELINE_BARRIER_IMAGE);
8220 
8221 			if (state.hasBuffer)
8222 				ops.push_back(OP_PIPELINE_BARRIER_BUFFER);
8223 		}
8224 
8225 		if (state.hasBoundBufferMemory)
8226 		{
8227 			if (usage & USAGE_TRANSFER_DST
8228 				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
8229 			{
8230 				ops.push_back(OP_BUFFER_FILL);
8231 				ops.push_back(OP_BUFFER_UPDATE);
8232 				ops.push_back(OP_BUFFER_COPY_FROM_BUFFER);
8233 				ops.push_back(OP_BUFFER_COPY_FROM_IMAGE);
8234 			}
8235 
8236 			if (usage & USAGE_TRANSFER_SRC
8237 				&& state.memoryDefined
8238 				&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
8239 			{
8240 				ops.push_back(OP_BUFFER_COPY_TO_BUFFER);
8241 				ops.push_back(OP_BUFFER_COPY_TO_IMAGE);
8242 			}
8243 		}
8244 
8245 		if (state.hasBoundImageMemory
8246 			&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED
8247 				|| getNumberOfSupportedLayouts(usage) > 1))
8248 		{
8249 			ops.push_back(OP_IMAGE_TRANSITION_LAYOUT);
8250 
8251 			{
8252 				if (usage & USAGE_TRANSFER_DST
8253 					&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
8254 						|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
8255 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT))
8256 				{
8257 					ops.push_back(OP_IMAGE_COPY_FROM_BUFFER);
8258 					ops.push_back(OP_IMAGE_COPY_FROM_IMAGE);
8259 					ops.push_back(OP_IMAGE_BLIT_FROM_IMAGE);
8260 				}
8261 
8262 				if (usage & USAGE_TRANSFER_SRC
8263 					&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
8264 						|| state.imageLayout == vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
8265 					&& state.imageDefined
8266 					&& state.cache.isValid(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT))
8267 				{
8268 					ops.push_back(OP_IMAGE_COPY_TO_BUFFER);
8269 					ops.push_back(OP_IMAGE_COPY_TO_IMAGE);
8270 					ops.push_back(OP_IMAGE_BLIT_TO_IMAGE);
8271 				}
8272 			}
8273 		}
8274 
8275 		// \note This depends on previous operations and always has to be the
8276 		// last command buffer operation check
8277 		if (ops.empty() || !state.commandBufferIsEmpty)
8278 			ops.push_back(OP_SECONDARY_COMMAND_BUFFER_END);
8279 	}
8280 	else if (state.stage == STAGE_RENDER_PASS)
8281 	{
8282 		if ((usage & USAGE_VERTEX_BUFFER) != 0
8283 			&& state.memoryDefined
8284 			&& state.hasBoundBufferMemory
8285 			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT))
8286 		{
8287 			ops.push_back(OP_RENDER_VERTEX_BUFFER);
8288 		}
8289 
8290 		if ((usage & USAGE_INDEX_BUFFER) != 0
8291 			&& state.memoryDefined
8292 			&& state.hasBoundBufferMemory
8293 			&& state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT))
8294 		{
8295 			ops.push_back(OP_RENDER_INDEX_BUFFER);
8296 		}
8297 
8298 		if ((usage & USAGE_UNIFORM_BUFFER) != 0
8299 			&& state.memoryDefined
8300 			&& state.hasBoundBufferMemory)
8301 		{
8302 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
8303 				ops.push_back(OP_RENDER_VERTEX_UNIFORM_BUFFER);
8304 
8305 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
8306 				ops.push_back(OP_RENDER_FRAGMENT_UNIFORM_BUFFER);
8307 		}
8308 
8309 		if ((usage & USAGE_UNIFORM_TEXEL_BUFFER) != 0
8310 			&& state.memoryDefined
8311 			&& state.hasBoundBufferMemory)
8312 		{
8313 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
8314 				ops.push_back(OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER);
8315 
8316 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT))
8317 				ops.push_back(OP_RENDER_FRAGMENT_UNIFORM_TEXEL_BUFFER);
8318 		}
8319 
8320 		if ((usage & USAGE_STORAGE_BUFFER) != 0
8321 			&& state.memoryDefined
8322 			&& state.hasBoundBufferMemory)
8323 		{
8324 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
8325 				ops.push_back(OP_RENDER_VERTEX_STORAGE_BUFFER);
8326 
8327 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
8328 				ops.push_back(OP_RENDER_FRAGMENT_STORAGE_BUFFER);
8329 		}
8330 
8331 		if ((usage & USAGE_STORAGE_TEXEL_BUFFER) != 0
8332 			&& state.memoryDefined
8333 			&& state.hasBoundBufferMemory)
8334 		{
8335 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
8336 				ops.push_back(OP_RENDER_VERTEX_STORAGE_TEXEL_BUFFER);
8337 
8338 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
8339 				ops.push_back(OP_RENDER_FRAGMENT_STORAGE_TEXEL_BUFFER);
8340 		}
8341 
8342 		if ((usage & USAGE_STORAGE_IMAGE) != 0
8343 			&& state.imageDefined
8344 			&& state.hasBoundImageMemory
8345 			&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL))
8346 		{
8347 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
8348 				ops.push_back(OP_RENDER_VERTEX_STORAGE_IMAGE);
8349 
8350 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
8351 				ops.push_back(OP_RENDER_FRAGMENT_STORAGE_IMAGE);
8352 		}
8353 
8354 		if ((usage & USAGE_SAMPLED_IMAGE) != 0
8355 			&& state.imageDefined
8356 			&& state.hasBoundImageMemory
8357 			&& (state.imageLayout == vk::VK_IMAGE_LAYOUT_GENERAL
8358 				|| state.imageLayout == vk::VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL))
8359 		{
8360 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
8361 				ops.push_back(OP_RENDER_VERTEX_SAMPLED_IMAGE);
8362 
8363 			if (state.cache.isValid(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT))
8364 				ops.push_back(OP_RENDER_FRAGMENT_SAMPLED_IMAGE);
8365 		}
8366 
8367 		if (!state.renderPassIsEmpty)
8368 			ops.push_back(OP_RENDERPASS_END);
8369 	}
8370 	else
8371 		DE_FATAL("Unknown stage");
8372 }
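
// A minimal sketch (not the actual generator) of how getAvailableOps() and applyOp() are
// intended to be driven together; the real generator must additionally record the
// corresponding Vulkan command for each chosen op:
//
//   vector<Op> ops;
//   getAvailableOps(state, supportsBuffers, supportsImages, usage, ops);
//   DE_ASSERT(!ops.empty());
//   const Op op = ops[((size_t)state.rng.getUint32()) % ops.size()];
//   applyOp(state, memory, op, usage);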
8373 
8374 void removeIllegalAccessFlags (vk::VkAccessFlags& accessflags, vk::VkPipelineStageFlags stageflags)
8375 {
8376 	if (!(stageflags & vk::VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT))
8377 		accessflags &= ~vk::VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
8378 
8379 	if (!(stageflags & vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT))
8380 		accessflags &= ~vk::VK_ACCESS_INDEX_READ_BIT;
8381 
8382 	if (!(stageflags & vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT))
8383 		accessflags &= ~vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
8384 
8385 	if (!(stageflags & (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
8386 						vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
8387 						vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT |
8388 						vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
8389 						vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
8390 						vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT)))
8391 		accessflags &= ~vk::VK_ACCESS_UNIFORM_READ_BIT;
8392 
8393 	if (!(stageflags & vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT))
8394 		accessflags &= ~vk::VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
8395 
8396 	if (!(stageflags & (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
8397 						vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
8398 						vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT |
8399 						vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
8400 						vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
8401 						vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT)))
8402 		accessflags &= ~vk::VK_ACCESS_SHADER_READ_BIT;
8403 
8404 	if (!(stageflags & (vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
8405 						vk::VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT |
8406 						vk::VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT |
8407 						vk::VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT |
8408 						vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
8409 						vk::VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT)))
8410 		accessflags &= ~vk::VK_ACCESS_SHADER_WRITE_BIT;
8411 
8412 	if (!(stageflags & vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT))
8413 		accessflags &= ~vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
8414 
8415 	if (!(stageflags & vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT))
8416 		accessflags &= ~vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
8417 
8418 	if (!(stageflags & (vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
8419 						vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT)))
8420 		accessflags &= ~vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
8421 
8422 	if (!(stageflags & (vk::VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
8423 						vk::VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT)))
8424 		accessflags &= ~vk::VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
8425 
8426 	if (!(stageflags & vk::VK_PIPELINE_STAGE_TRANSFER_BIT))
8427 		accessflags &= ~vk::VK_ACCESS_TRANSFER_READ_BIT;
8428 
8429 	if (!(stageflags & vk::VK_PIPELINE_STAGE_TRANSFER_BIT))
8430 		accessflags &= ~vk::VK_ACCESS_TRANSFER_WRITE_BIT;
8431 
8432 	if (!(stageflags & vk::VK_PIPELINE_STAGE_HOST_BIT))
8433 		accessflags &= ~vk::VK_ACCESS_HOST_READ_BIT;
8434 
8435 	if (!(stageflags & vk::VK_PIPELINE_STAGE_HOST_BIT))
8436 		accessflags &= ~vk::VK_ACCESS_HOST_WRITE_BIT;
8437 }
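
// For example, if stageflags is only VK_PIPELINE_STAGE_TRANSFER_BIT, an access mask of
// (VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_HOST_READ_BIT) is
// reduced to VK_ACCESS_TRANSFER_READ_BIT, since shader and host accesses cannot be paired
// with the transfer stage alone.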
8438 
8439 void applyOp (State& state, const Memory& memory, Op op, Usage usage)
8440 {
8441 	switch (op)
8442 	{
8443 		case OP_MAP:
8444 			DE_ASSERT(state.stage == STAGE_HOST);
8445 			DE_ASSERT(!state.mapped);
8446 			state.mapped = true;
8447 			break;
8448 
8449 		case OP_UNMAP:
8450 			DE_ASSERT(state.stage == STAGE_HOST);
8451 			DE_ASSERT(state.mapped);
8452 			state.mapped = false;
8453 			break;
8454 
8455 		case OP_MAP_FLUSH:
8456 			DE_ASSERT(state.stage == STAGE_HOST);
8457 			DE_ASSERT(!state.hostFlushed);
8458 			state.hostFlushed = true;
8459 			break;
8460 
8461 		case OP_MAP_INVALIDATE:
8462 			DE_ASSERT(state.stage == STAGE_HOST);
8463 			DE_ASSERT(!state.hostInvalidated);
8464 			state.hostInvalidated = true;
8465 			break;
8466 
8467 		case OP_MAP_READ:
8468 			DE_ASSERT(state.stage == STAGE_HOST);
8469 			DE_ASSERT(state.hostInvalidated);
8470 			state.rng.getUint32();
8471 			break;
8472 
8473 		case OP_MAP_WRITE:
8474 			DE_ASSERT(state.stage == STAGE_HOST);
8475 			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
8476 				state.hostFlushed = false;
8477 
8478 			state.memoryDefined = true;
8479 			state.imageDefined = false;
8480 			state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
8481 			state.rng.getUint32();
8482 			break;
8483 
8484 		case OP_MAP_MODIFY:
8485 			DE_ASSERT(state.stage == STAGE_HOST);
8486 			DE_ASSERT(state.hostInvalidated);
8487 
8488 			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
8489 				state.hostFlushed = false;
8490 
8491 			state.rng.getUint32();
8492 			break;
8493 
8494 		case OP_BUFFER_CREATE:
8495 			DE_ASSERT(state.stage == STAGE_HOST);
8496 			DE_ASSERT(!state.hasBuffer);
8497 
8498 			state.hasBuffer = true;
8499 			break;
8500 
8501 		case OP_BUFFER_DESTROY:
8502 			DE_ASSERT(state.stage == STAGE_HOST);
8503 			DE_ASSERT(state.hasBuffer);
8504 			DE_ASSERT(state.hasBoundBufferMemory);
8505 
8506 			state.hasBuffer = false;
8507 			state.hasBoundBufferMemory = false;
8508 			break;
8509 
8510 		case OP_BUFFER_BINDMEMORY:
8511 			DE_ASSERT(state.stage == STAGE_HOST);
8512 			DE_ASSERT(state.hasBuffer);
8513 			DE_ASSERT(!state.hasBoundBufferMemory);
8514 
8515 			state.hasBoundBufferMemory = true;
8516 			break;
8517 
8518 		case OP_IMAGE_CREATE:
8519 			DE_ASSERT(state.stage == STAGE_HOST);
8520 			DE_ASSERT(!state.hasImage);
8521 			DE_ASSERT(!state.hasBuffer);
8522 
8523 			state.hasImage = true;
8524 			break;
8525 
8526 		case OP_IMAGE_DESTROY:
8527 			DE_ASSERT(state.stage == STAGE_HOST);
8528 			DE_ASSERT(state.hasImage);
8529 			DE_ASSERT(state.hasBoundImageMemory);
8530 
8531 			state.hasImage = false;
8532 			state.hasBoundImageMemory = false;
8533 			state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
8534 			state.imageDefined = false;
8535 			break;
8536 
8537 		case OP_IMAGE_BINDMEMORY:
8538 			DE_ASSERT(state.stage == STAGE_HOST);
8539 			DE_ASSERT(state.hasImage);
8540 			DE_ASSERT(!state.hasBoundImageMemory);
8541 
8542 			state.hasBoundImageMemory = true;
8543 			break;
8544 
8545 		case OP_IMAGE_TRANSITION_LAYOUT:
8546 		{
8547 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER || state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8548 			DE_ASSERT(state.hasImage);
8549 			DE_ASSERT(state.hasBoundImageMemory);
8550 
8551 			// \todo [2016-03-09 mika] Support linear tiling and predefined data
8552 			const vk::VkImageLayout		srcLayout	= state.rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
8553 			const vk::VkImageLayout		dstLayout	= getRandomNextLayout(state.rng, usage, srcLayout);
8554 
8555 			vk::VkPipelineStageFlags	dirtySrcStages;
8556 			vk::VkAccessFlags			dirtySrcAccesses;
8557 			vk::VkPipelineStageFlags	dirtyDstStages;
8558 			vk::VkAccessFlags			dirtyDstAccesses;
8559 
8560 			vk::VkPipelineStageFlags	srcStages;
8561 			vk::VkAccessFlags			srcAccesses;
8562 			vk::VkPipelineStageFlags	dstStages;
8563 			vk::VkAccessFlags			dstAccesses;
8564 
8565 			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
8566 
8567 			// Try masking some random bits
8568 			srcStages	= dirtySrcStages;
8569 			srcAccesses	= dirtySrcAccesses;
8570 
8571 			dstStages	= state.cache.getAllowedStages() & state.rng.getUint32();
8572 			dstAccesses	= state.cache.getAllowedAcceses() & state.rng.getUint32();
8573 
8574 			// If there are no bits in the dst stage mask, use all stages
8575 			dstStages	= dstStages ? dstStages : state.cache.getAllowedStages();
8576 
8577 			if (!srcStages)
8578 				srcStages = dstStages;
8579 
8580 			removeIllegalAccessFlags(dstAccesses, dstStages);
8581 			removeIllegalAccessFlags(srcAccesses, srcStages);
8582 
8583 			if (srcLayout == vk::VK_IMAGE_LAYOUT_UNDEFINED)
8584 				state.imageDefined = false;
8585 
8586 			state.commandBufferIsEmpty = false;
8587 			state.imageLayout = dstLayout;
8588 			state.memoryDefined = false;
8589 			state.cache.imageLayoutBarrier(srcStages, srcAccesses, dstStages, dstAccesses);
8590 			break;
8591 		}
8592 
8593 		case OP_QUEUE_WAIT_FOR_IDLE:
8594 			DE_ASSERT(state.stage == STAGE_HOST);
8595 			DE_ASSERT(!state.queueIdle);
8596 
8597 			state.queueIdle = true;
8598 
8599 			state.cache.waitForIdle();
8600 			break;
8601 
8602 		case OP_DEVICE_WAIT_FOR_IDLE:
8603 			DE_ASSERT(state.stage == STAGE_HOST);
8604 			DE_ASSERT(!state.deviceIdle);
8605 
8606 			state.queueIdle = true;
8607 			state.deviceIdle = true;
8608 
8609 			state.cache.waitForIdle();
8610 			break;
8611 
8612 		case OP_COMMAND_BUFFER_BEGIN:
8613 			DE_ASSERT(state.stage == STAGE_HOST);
8614 			state.stage = STAGE_COMMAND_BUFFER;
8615 			state.commandBufferIsEmpty = true;
8616 			// Makes host writes visible to command buffer
8617 			state.cache.submitCommandBuffer();
8618 			break;
8619 
8620 		case OP_COMMAND_BUFFER_END:
8621 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER || state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8622 			state.stage = STAGE_HOST;
8623 			state.queueIdle = false;
8624 			state.deviceIdle = false;
8625 			break;
8626 
8627 		case OP_SECONDARY_COMMAND_BUFFER_BEGIN:
8628 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER || state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8629 			state.stage = STAGE_SECONDARY_COMMAND_BUFFER;
8630 			state.primaryCommandBufferIsEmpty = state.commandBufferIsEmpty;
8631 			state.commandBufferIsEmpty = true;
8632 			break;
8633 
8634 		case OP_SECONDARY_COMMAND_BUFFER_END:
8635 			DE_ASSERT(state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8636 			state.stage = STAGE_COMMAND_BUFFER;
8637 			state.commandBufferIsEmpty = state.primaryCommandBufferIsEmpty;
8638 			break;
8639 
8640 		case OP_BUFFER_COPY_FROM_BUFFER:
8641 		case OP_BUFFER_COPY_FROM_IMAGE:
8642 		case OP_BUFFER_UPDATE:
8643 		case OP_BUFFER_FILL:
8644 			state.rng.getUint32();
8645 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER || state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8646 
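			// A device write to non-coherent memory means the host must invalidate its mapped range before reading the data again.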
8647 			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
8648 				state.hostInvalidated = false;
8649 
8650 			state.commandBufferIsEmpty = false;
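			// The buffer and image alias the same memory, so writing the buffer leaves any image contents and layout undefined.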
8651 			state.memoryDefined = true;
8652 			state.imageDefined = false;
8653 			state.imageLayout = vk::VK_IMAGE_LAYOUT_UNDEFINED;
8654 			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
8655 			break;
8656 
8657 		case OP_BUFFER_COPY_TO_BUFFER:
8658 		case OP_BUFFER_COPY_TO_IMAGE:
8659 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER || state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8660 
8661 			state.commandBufferIsEmpty = false;
8662 			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
8663 			break;
8664 
8665 		case OP_IMAGE_BLIT_FROM_IMAGE:
8666 			state.rng.getBool();
8667 			// Fall through
8668 		case OP_IMAGE_COPY_FROM_BUFFER:
8669 		case OP_IMAGE_COPY_FROM_IMAGE:
8670 			state.rng.getUint32();
8671 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER || state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8672 
8673 			if ((memory.getMemoryType().propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) == 0)
8674 				state.hostInvalidated = false;
8675 
8676 			state.commandBufferIsEmpty = false;
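			// Writing the image makes the aliased buffer contents undefined.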
8677 			state.memoryDefined = false;
8678 			state.imageDefined = true;
8679 			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_WRITE_BIT);
8680 			break;
8681 
8682 		case OP_IMAGE_BLIT_TO_IMAGE:
8683 			state.rng.getBool();
8684 			// Fall through
8685 		case OP_IMAGE_COPY_TO_BUFFER:
8686 		case OP_IMAGE_COPY_TO_IMAGE:
8687 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER || state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8688 
8689 			state.commandBufferIsEmpty = false;
8690 			state.cache.perform(vk::VK_PIPELINE_STAGE_TRANSFER_BIT, vk::VK_ACCESS_TRANSFER_READ_BIT);
8691 			break;
8692 
8693 		case OP_PIPELINE_BARRIER_GLOBAL:
8694 		case OP_PIPELINE_BARRIER_BUFFER:
8695 		case OP_PIPELINE_BARRIER_IMAGE:
8696 		{
8697 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER || state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8698 
8699 			vk::VkPipelineStageFlags	dirtySrcStages;
8700 			vk::VkAccessFlags			dirtySrcAccesses;
8701 			vk::VkPipelineStageFlags	dirtyDstStages;
8702 			vk::VkAccessFlags			dirtyDstAccesses;
8703 
8704 			vk::VkPipelineStageFlags	srcStages;
8705 			vk::VkAccessFlags			srcAccesses;
8706 			vk::VkPipelineStageFlags	dstStages;
8707 			vk::VkAccessFlags			dstAccesses;
8708 
8709 			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
8710 
8711 			// Try masking some random bits
8712 			srcStages	= dirtySrcStages & state.rng.getUint32();
8713 			srcAccesses	= dirtySrcAccesses & state.rng.getUint32();
8714 
8715 			dstStages	= dirtyDstStages & state.rng.getUint32();
8716 			dstAccesses	= dirtyDstAccesses & state.rng.getUint32();
8717 
8718 			// If there are no bits in stage mask use the original dirty stages
8719 			srcStages	= srcStages ? srcStages : dirtySrcStages;
8720 			dstStages	= dstStages ? dstStages : dirtyDstStages;
8721 
8722 			if (!srcStages)
8723 				srcStages = dstStages;
8724 
8725 			removeIllegalAccessFlags(dstAccesses, dstStages);
8726 			removeIllegalAccessFlags(srcAccesses, srcStages);
8727 
8728 			state.commandBufferIsEmpty = false;
8729 			state.cache.barrier(srcStages, srcAccesses, dstStages, dstAccesses);
8730 			break;
8731 		}
8732 
8733 		case OP_RENDERPASS_BEGIN:
8734 		{
8735 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER);
8736 
8737 			state.renderPassIsEmpty	= true;
8738 			state.stage				= STAGE_RENDER_PASS;
8739 			break;
8740 		}
8741 
8742 		case OP_RENDERPASS_END:
8743 		{
8744 			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
8745 
8746 			state.renderPassIsEmpty	= true;
8747 			state.stage				= STAGE_COMMAND_BUFFER;
8748 			break;
8749 		}
8750 
8751 		case OP_RENDER_VERTEX_BUFFER:
8752 		{
8753 			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
8754 
8755 			state.renderPassIsEmpty = false;
8756 			state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT);
8757 			break;
8758 		}
8759 
8760 		case OP_RENDER_INDEX_BUFFER:
8761 		{
8762 			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
8763 
8764 			state.renderPassIsEmpty = false;
8765 			state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_INPUT_BIT, vk::VK_ACCESS_INDEX_READ_BIT);
8766 			break;
8767 		}
8768 
8769 		case OP_RENDER_VERTEX_UNIFORM_BUFFER:
8770 		case OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER:
8771 		{
8772 			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
8773 
8774 			state.renderPassIsEmpty = false;
8775 			state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT);
8776 			break;
8777 		}
8778 
8779 		case OP_RENDER_FRAGMENT_UNIFORM_BUFFER:
8780 		case OP_RENDER_FRAGMENT_UNIFORM_TEXEL_BUFFER:
8781 		{
8782 			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
8783 
8784 			state.renderPassIsEmpty = false;
8785 			state.cache.perform(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_UNIFORM_READ_BIT);
8786 			break;
8787 		}
8788 
8789 		case OP_RENDER_VERTEX_STORAGE_BUFFER:
8790 		case OP_RENDER_VERTEX_STORAGE_TEXEL_BUFFER:
8791 		{
8792 			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
8793 
8794 			state.renderPassIsEmpty = false;
8795 			state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT);
8796 			break;
8797 		}
8798 
8799 		case OP_RENDER_FRAGMENT_STORAGE_BUFFER:
8800 		case OP_RENDER_FRAGMENT_STORAGE_TEXEL_BUFFER:
8801 		{
8802 			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
8803 
8804 			state.renderPassIsEmpty = false;
8805 			state.cache.perform(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT);
8806 			break;
8807 		}
8808 
8809 		case OP_RENDER_FRAGMENT_STORAGE_IMAGE:
8810 		case OP_RENDER_FRAGMENT_SAMPLED_IMAGE:
8811 		{
8812 			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
8813 
8814 			state.renderPassIsEmpty = false;
8815 			state.cache.perform(vk::VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT);
8816 			break;
8817 		}
8818 
8819 		case OP_RENDER_VERTEX_STORAGE_IMAGE:
8820 		case OP_RENDER_VERTEX_SAMPLED_IMAGE:
8821 		{
8822 			DE_ASSERT(state.stage == STAGE_RENDER_PASS);
8823 
8824 			state.renderPassIsEmpty = false;
8825 			state.cache.perform(vk::VK_PIPELINE_STAGE_VERTEX_SHADER_BIT, vk::VK_ACCESS_SHADER_READ_BIT);
8826 			break;
8827 		}
8828 
8829 		default:
8830 			DE_FATAL("Unknown op");
8831 	}
8832 }
8833 
8834 de::MovePtr<Command> createHostCommand (Op					op,
8835 										de::Random&			rng,
8836 										Usage				usage,
8837 										vk::VkSharingMode	sharing)
8838 {
8839 	switch (op)
8840 	{
8841 		case OP_MAP:					return de::MovePtr<Command>(new Map());
8842 		case OP_UNMAP:					return de::MovePtr<Command>(new UnMap());
8843 
8844 		case OP_MAP_FLUSH:				return de::MovePtr<Command>(new Flush());
8845 		case OP_MAP_INVALIDATE:			return de::MovePtr<Command>(new Invalidate());
8846 
8847 		case OP_MAP_READ:				return de::MovePtr<Command>(new HostMemoryAccess(true, false, rng.getUint32()));
8848 		case OP_MAP_WRITE:				return de::MovePtr<Command>(new HostMemoryAccess(false, true, rng.getUint32()));
8849 		case OP_MAP_MODIFY:				return de::MovePtr<Command>(new HostMemoryAccess(true, true, rng.getUint32()));
8850 
8851 		case OP_BUFFER_CREATE:			return de::MovePtr<Command>(new CreateBuffer(usageToBufferUsageFlags(usage), sharing));
8852 		case OP_BUFFER_DESTROY:			return de::MovePtr<Command>(new DestroyBuffer());
8853 		case OP_BUFFER_BINDMEMORY:		return de::MovePtr<Command>(new BindBufferMemory());
8854 
8855 		case OP_IMAGE_CREATE:			return de::MovePtr<Command>(new CreateImage(usageToImageUsageFlags(usage), sharing));
8856 		case OP_IMAGE_DESTROY:			return de::MovePtr<Command>(new DestroyImage());
8857 		case OP_IMAGE_BINDMEMORY:		return de::MovePtr<Command>(new BindImageMemory());
8858 
8859 		case OP_QUEUE_WAIT_FOR_IDLE:	return de::MovePtr<Command>(new QueueWaitIdle());
8860 		case OP_DEVICE_WAIT_FOR_IDLE:	return de::MovePtr<Command>(new DeviceWaitIdle());
8861 
8862 		default:
8863 			DE_FATAL("Unknown op");
8864 			return de::MovePtr<Command>(DE_NULL);
8865 	}
8866 }
8867 
8868 de::MovePtr<CmdCommand> createCmdCommand (de::Random&	rng,
8869 										  const State&	state,
8870 										  Op			op,
8871 										  Usage			usage)
8872 {
8873 	switch (op)
8874 	{
8875 		case OP_BUFFER_FILL:					return de::MovePtr<CmdCommand>(new FillBuffer(rng.getUint32()));
8876 		case OP_BUFFER_UPDATE:					return de::MovePtr<CmdCommand>(new UpdateBuffer(rng.getUint32()));
8877 		case OP_BUFFER_COPY_TO_BUFFER:			return de::MovePtr<CmdCommand>(new BufferCopyToBuffer());
8878 		case OP_BUFFER_COPY_FROM_BUFFER:		return de::MovePtr<CmdCommand>(new BufferCopyFromBuffer(rng.getUint32()));
8879 
8880 		case OP_BUFFER_COPY_TO_IMAGE:			return de::MovePtr<CmdCommand>(new BufferCopyToImage());
8881 		case OP_BUFFER_COPY_FROM_IMAGE:			return de::MovePtr<CmdCommand>(new BufferCopyFromImage(rng.getUint32()));
8882 
8883 		case OP_IMAGE_TRANSITION_LAYOUT:
8884 		{
8885 			DE_ASSERT(state.stage == STAGE_COMMAND_BUFFER || state.stage == STAGE_SECONDARY_COMMAND_BUFFER);
8886 			DE_ASSERT(state.hasImage);
8887 			DE_ASSERT(state.hasBoundImageMemory);
8888 
8889 			const vk::VkImageLayout		srcLayout	= rng.getFloat() < 0.9f ? state.imageLayout : vk::VK_IMAGE_LAYOUT_UNDEFINED;
8890 			const vk::VkImageLayout		dstLayout	= getRandomNextLayout(rng, usage, srcLayout);
8891 
8892 			vk::VkPipelineStageFlags	dirtySrcStages;
8893 			vk::VkAccessFlags			dirtySrcAccesses;
8894 			vk::VkPipelineStageFlags	dirtyDstStages;
8895 			vk::VkAccessFlags			dirtyDstAccesses;
8896 
8897 			vk::VkPipelineStageFlags	srcStages;
8898 			vk::VkAccessFlags			srcAccesses;
8899 			vk::VkPipelineStageFlags	dstStages;
8900 			vk::VkAccessFlags			dstAccesses;
8901 
8902 			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
8903 
8904 			// Try masking some random bits
8905 			srcStages	= dirtySrcStages;
8906 			srcAccesses	= dirtySrcAccesses;
8907 
8908 			dstStages	= state.cache.getAllowedStages() & rng.getUint32();
8909 			dstAccesses	= state.cache.getAllowedAcceses() & rng.getUint32();
8910 
8911 			// If there are no bits in dst stage mask use all stages
8912 			dstStages	= dstStages ? dstStages : state.cache.getAllowedStages();
8913 
8914 			if (!srcStages)
8915 				srcStages = dstStages;
8916 
8917 			removeIllegalAccessFlags(dstAccesses, dstStages);
8918 			removeIllegalAccessFlags(srcAccesses, srcStages);
8919 
8920 			return de::MovePtr<CmdCommand>(new ImageTransition(srcStages, srcAccesses, dstStages, dstAccesses, srcLayout, dstLayout));
8921 		}
8922 
8923 		case OP_IMAGE_COPY_TO_BUFFER:			return de::MovePtr<CmdCommand>(new ImageCopyToBuffer(state.imageLayout));
8924 		case OP_IMAGE_COPY_FROM_BUFFER:			return de::MovePtr<CmdCommand>(new ImageCopyFromBuffer(rng.getUint32(), state.imageLayout));
8925 		case OP_IMAGE_COPY_TO_IMAGE:			return de::MovePtr<CmdCommand>(new ImageCopyToImage(state.imageLayout));
8926 		case OP_IMAGE_COPY_FROM_IMAGE:			return de::MovePtr<CmdCommand>(new ImageCopyFromImage(rng.getUint32(), state.imageLayout));
8927 		case OP_IMAGE_BLIT_TO_IMAGE:
8928 		{
8929 			const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
8930 			return de::MovePtr<CmdCommand>(new ImageBlitToImage(scale, state.imageLayout));
8931 		}
8932 
8933 		case OP_IMAGE_BLIT_FROM_IMAGE:
8934 		{
8935 			const BlitScale scale = rng.getBool() ? BLIT_SCALE_20 : BLIT_SCALE_10;
8936 			return de::MovePtr<CmdCommand>(new ImageBlitFromImage(rng.getUint32(), scale, state.imageLayout));
8937 		}
8938 
8939 		case OP_PIPELINE_BARRIER_GLOBAL:
8940 		case OP_PIPELINE_BARRIER_BUFFER:
8941 		case OP_PIPELINE_BARRIER_IMAGE:
8942 		{
8943 			vk::VkPipelineStageFlags	dirtySrcStages;
8944 			vk::VkAccessFlags			dirtySrcAccesses;
8945 			vk::VkPipelineStageFlags	dirtyDstStages;
8946 			vk::VkAccessFlags			dirtyDstAccesses;
8947 
8948 			vk::VkPipelineStageFlags	srcStages;
8949 			vk::VkAccessFlags			srcAccesses;
8950 			vk::VkPipelineStageFlags	dstStages;
8951 			vk::VkAccessFlags			dstAccesses;
8952 
8953 			state.cache.getFullBarrier(dirtySrcStages, dirtySrcAccesses, dirtyDstStages, dirtyDstAccesses);
8954 
8955 			// Try masking some random bits
8956 			srcStages	= dirtySrcStages & rng.getUint32();
8957 			srcAccesses	= dirtySrcAccesses & rng.getUint32();
8958 
8959 			dstStages	= dirtyDstStages & rng.getUint32();
8960 			dstAccesses	= dirtyDstAccesses & rng.getUint32();
8961 
8962 			// If there are no bits in stage mask use the original dirty stages
8963 			srcStages	= srcStages ? srcStages : dirtySrcStages;
8964 			dstStages	= dstStages ? dstStages : dirtyDstStages;
8965 
8966 			if (!srcStages)
8967 				srcStages = dstStages;
8968 
8969 			removeIllegalAccessFlags(dstAccesses, dstStages);
8970 			removeIllegalAccessFlags(srcAccesses, srcStages);
8971 
8972 			PipelineBarrier::Type type;
8973 
8974 			if (op == OP_PIPELINE_BARRIER_IMAGE)
8975 				type = PipelineBarrier::TYPE_IMAGE;
8976 			else if (op == OP_PIPELINE_BARRIER_BUFFER)
8977 				type = PipelineBarrier::TYPE_BUFFER;
8978 			else if (op == OP_PIPELINE_BARRIER_GLOBAL)
8979 				type = PipelineBarrier::TYPE_GLOBAL;
8980 			else
8981 			{
8982 				type = PipelineBarrier::TYPE_LAST;
8983 				DE_FATAL("Unknown op");
8984 			}
8985 
8986 			if (type == PipelineBarrier::TYPE_IMAGE)
8987 				return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::just(state.imageLayout)));
8988 			else
8989 				return de::MovePtr<CmdCommand>(new PipelineBarrier(srcStages, srcAccesses, dstStages, dstAccesses, type, tcu::nothing<vk::VkImageLayout>()));
8990 		}
8991 
8992 		default:
8993 			DE_FATAL("Unknown op");
8994 			return de::MovePtr<CmdCommand>(DE_NULL);
8995 	}
8996 }
8997 
8998 de::MovePtr<RenderPassCommand> createRenderPassCommand (de::Random&,
8999 														const State&,
9000 														Op				op)
9001 {
9002 	switch (op)
9003 	{
9004 		case OP_RENDER_VERTEX_BUFFER:					return de::MovePtr<RenderPassCommand>(new RenderVertexBuffer());
9005 		case OP_RENDER_INDEX_BUFFER:					return de::MovePtr<RenderPassCommand>(new RenderIndexBuffer());
9006 
9007 		case OP_RENDER_VERTEX_UNIFORM_BUFFER:			return de::MovePtr<RenderPassCommand>(new RenderVertexUniformBuffer());
9008 		case OP_RENDER_FRAGMENT_UNIFORM_BUFFER:			return de::MovePtr<RenderPassCommand>(new RenderFragmentUniformBuffer());
9009 
9010 		case OP_RENDER_VERTEX_UNIFORM_TEXEL_BUFFER:		return de::MovePtr<RenderPassCommand>(new RenderVertexUniformTexelBuffer());
9011 		case OP_RENDER_FRAGMENT_UNIFORM_TEXEL_BUFFER:	return de::MovePtr<RenderPassCommand>(new RenderFragmentUniformTexelBuffer());
9012 
9013 		case OP_RENDER_VERTEX_STORAGE_BUFFER:			return de::MovePtr<RenderPassCommand>(new RenderVertexStorageBuffer());
9014 		case OP_RENDER_FRAGMENT_STORAGE_BUFFER:			return de::MovePtr<RenderPassCommand>(new RenderFragmentStorageBuffer());
9015 
9016 		case OP_RENDER_VERTEX_STORAGE_TEXEL_BUFFER:		return de::MovePtr<RenderPassCommand>(new RenderVertexStorageTexelBuffer());
9017 		case OP_RENDER_FRAGMENT_STORAGE_TEXEL_BUFFER:	return de::MovePtr<RenderPassCommand>(new RenderFragmentStorageTexelBuffer());
9018 
9019 		case OP_RENDER_VERTEX_STORAGE_IMAGE:			return de::MovePtr<RenderPassCommand>(new RenderVertexStorageImage());
9020 		case OP_RENDER_FRAGMENT_STORAGE_IMAGE:			return de::MovePtr<RenderPassCommand>(new RenderFragmentStorageImage());
9021 
9022 		case OP_RENDER_VERTEX_SAMPLED_IMAGE:			return de::MovePtr<RenderPassCommand>(new RenderVertexSampledImage());
9023 		case OP_RENDER_FRAGMENT_SAMPLED_IMAGE:			return de::MovePtr<RenderPassCommand>(new RenderFragmentSampledImage());
9024 
9025 		default:
9026 			DE_FATAL("Unknown op");
9027 			return de::MovePtr<RenderPassCommand>(DE_NULL);
9028 	}
9029 }
9030 
9031 de::MovePtr<CmdCommand> createRenderPassCommands (const Memory&	memory,
9032 												  de::Random&	nextOpRng,
9033 												  State&		state,
9034 												  Usage			usage,
9035 												  size_t&		opNdx,
9036 												  size_t		opCount)
9037 {
9038 	vector<RenderPassCommand*>	commands;
9039 
9040 	try
9041 	{
9042 		for (; opNdx < opCount; opNdx++)
9043 		{
9044 			vector<Op>	ops;
9045 
9046 			getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
9047 
9048 			DE_ASSERT(!ops.empty());
9049 
9050 			{
9051 				const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
9052 
9053 				if (op == OP_RENDERPASS_END)
9054 				{
9055 					break;
9056 				}
9057 				else
9058 				{
9059 					de::Random	rng	(state.rng);
9060 
9061 					commands.push_back(createRenderPassCommand(rng, state, op).release());
9062 					applyOp(state, memory, op, usage);
9063 
9064 					DE_ASSERT(state.rng == rng);
9065 				}
9066 			}
9067 		}
9068 
9069 		applyOp(state, memory, OP_RENDERPASS_END, usage);
9070 		return de::MovePtr<CmdCommand>(new SubmitRenderPass(commands));
9071 	}
9072 	catch (...)
9073 	{
9074 		for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
9075 			delete commands[commandNdx];
9076 
9077 		throw;
9078 	}
9079 }
9080 
9081 de::MovePtr<CmdCommand> createSecondaryCmdCommands (const Memory&	memory,
9082 												    de::Random&		nextOpRng,
9083 												    State&			state,
9084 												    Usage			usage,
9085 												    size_t&			opNdx,
9086 												    size_t			opCount)
9087 {
9088 	vector<CmdCommand*>	commands;
9089 
9090 	try
9091 	{
9092 		for (; opNdx < opCount; opNdx++)
9093 		{
9094 			vector<Op>	ops;
9095 
9096 			getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
9097 
9098 			DE_ASSERT(!ops.empty());
9099 
9100 			{
9101 				const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
9102 
9103 				if (op == OP_SECONDARY_COMMAND_BUFFER_END)
9104 				{
9105 					break;
9106 				}
9107 				else
9108 				{
9109 					de::Random	rng(state.rng);
9110 
9111 					commands.push_back(createCmdCommand(rng, state, op, usage).release());
9112 					applyOp(state, memory, op, usage);
9113 
9114 					DE_ASSERT(state.rng == rng);
9115 				}
9116 			}
9117 		}
9118 
9119 		applyOp(state, memory, OP_SECONDARY_COMMAND_BUFFER_END, usage);
9120 		return de::MovePtr<CmdCommand>(new ExecuteSecondaryCommandBuffer(commands));
9121 	}
9122 	catch (...)
9123 	{
9124 		for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
9125 			delete commands[commandNdx];
9126 
9127 		throw;
9128 	}
9129 }
9130 
9131 de::MovePtr<Command> createCmdCommands (const Memory&	memory,
9132 										de::Random&		nextOpRng,
9133 										State&			state,
9134 										Usage			usage,
9135 										size_t&			opNdx,
9136 										size_t			opCount)
9137 {
9138 	vector<CmdCommand*>	commands;
9139 
9140 	try
9141 	{
9142 		// Insert a mostly-full barrier to order this work wrt previous command buffer.
9143 		commands.push_back(new PipelineBarrier(state.cache.getAllowedStages(),
9144 											   state.cache.getAllowedAcceses(),
9145 											   state.cache.getAllowedStages(),
9146 											   state.cache.getAllowedAcceses(),
9147 											   PipelineBarrier::TYPE_GLOBAL,
9148 											   tcu::nothing<vk::VkImageLayout>()));
9149 
9150 		for (; opNdx < opCount; opNdx++)
9151 		{
9152 			vector<Op>	ops;
9153 
9154 			getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
9155 
9156 			DE_ASSERT(!ops.empty());
9157 
9158 			{
9159 				const Op op = nextOpRng.choose<Op>(ops.begin(), ops.end());
9160 
9161 				if (op == OP_COMMAND_BUFFER_END)
9162 				{
9163 					break;
9164 				}
9165 				else
9166 				{
9167 					// \note Command needs to know the state before the operation
9168 					if (op == OP_RENDERPASS_BEGIN)
9169 					{
9170 						applyOp(state, memory, op, usage);
9171 						commands.push_back(createRenderPassCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
9172 					}
9173 					else if (op == OP_SECONDARY_COMMAND_BUFFER_BEGIN)
9174 					{
9175 						applyOp(state, memory, op, usage);
9176 						commands.push_back(createSecondaryCmdCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
9177 					}
9178 					else
9179 					{
9180 						de::Random	rng	(state.rng);
9181 
9182 						commands.push_back(createCmdCommand(rng, state, op, usage).release());
9183 						applyOp(state, memory, op, usage);
9184 
9185 						DE_ASSERT(state.rng == rng);
9186 					}
9187 
9188 				}
9189 			}
9190 		}
9191 
9192 		applyOp(state, memory, OP_COMMAND_BUFFER_END, usage);
9193 		return de::MovePtr<Command>(new SubmitCommandBuffer(commands));
9194 	}
9195 	catch (...)
9196 	{
9197 		for (size_t commandNdx = 0; commandNdx < commands.size(); commandNdx++)
9198 			delete commands[commandNdx];
9199 
9200 		throw;
9201 	}
9202 }
9203 
9204 void createCommands (vector<Command*>&	commands,
9205 					 deUint32			seed,
9206 					 const Memory&		memory,
9207 					 Usage				usage,
9208 					 vk::VkSharingMode	sharingMode,
9209 					 size_t				opCount)
9210 {
9211 	State			state		(usage, seed);
9212 	// Used to select next operation only
9213 	de::Random		nextOpRng	(seed ^ 12930809);
9214 
9215 	commands.reserve(opCount);
9216 
9217 	for (size_t opNdx = 0; opNdx < opCount; opNdx++)
9218 	{
9219 		vector<Op>	ops;
9220 
9221 		getAvailableOps(state, memory.getSupportBuffers(), memory.getSupportImages(), usage, ops);
9222 
9223 		DE_ASSERT(!ops.empty());
9224 
9225 		{
9226 			const Op	op	= nextOpRng.choose<Op>(ops.begin(), ops.end());
9227 
9228 			if (op == OP_COMMAND_BUFFER_BEGIN)
9229 			{
9230 				applyOp(state, memory, op, usage);
9231 				commands.push_back(createCmdCommands(memory, nextOpRng, state, usage, opNdx, opCount).release());
9232 			}
9233 			else
9234 			{
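				// Copy the state RNG so that command construction and applyOp() consume identical random streams.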
9235 				de::Random	rng	(state.rng);
9236 
9237 				commands.push_back(createHostCommand(op, rng, usage, sharingMode).release());
9238 				applyOp(state, memory, op, usage);
9239 
9240 				// Make sure that random generator is in sync
9241 				DE_ASSERT(state.rng == rng);
9242 			}
9243 		}
9244 	}
9245 
9246 	// Clean up resources
9247 	if (state.hasBuffer || state.hasImage)
9248 	{
9249 		if (!state.queueIdle)
9250 			commands.push_back(new QueueWaitIdle());
9251 
9252 		if (state.hasBuffer)
9253 			commands.push_back(new DestroyBuffer());
9254 
9255 		if (state.hasImage)
9256 			commands.push_back(new DestroyImage());
9257 	}
9258 }
9259 
9260 class MemoryTestInstance : public TestInstance
9261 {
9262 public:
9263 
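	// The instance runs as a state machine: each iteration advances through createCommandsAndAllocateMemory, prepare, execute and verify, and each stage function returns true while more work remains.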
9264 	typedef bool(MemoryTestInstance::*StageFunc)(void);
9265 
9266 												MemoryTestInstance				(::vkt::Context& context, const TestConfig& config);
9267 												~MemoryTestInstance				(void);
9268 
9269 	tcu::TestStatus								iterate							(void);
9270 
9271 private:
9272 	const TestConfig							m_config;
9273 	const size_t								m_iterationCount;
9274 	const size_t								m_opCount;
9275 	const vk::VkPhysicalDeviceMemoryProperties	m_memoryProperties;
9276 	deUint32									m_memoryTypeNdx;
9277 	size_t										m_iteration;
9278 	StageFunc									m_stage;
9279 	tcu::ResultCollector						m_resultCollector;
9280 
9281 	vector<Command*>							m_commands;
9282 	MovePtr<Memory>								m_memory;
9283 	MovePtr<Context>							m_renderContext;
9284 	MovePtr<PrepareContext>						m_prepareContext;
9285 
9286 	bool										nextIteration					(void);
9287 	bool										nextMemoryType					(void);
9288 
9289 	bool										createCommandsAndAllocateMemory	(void);
9290 	bool										prepare							(void);
9291 	bool										execute							(void);
9292 	bool										verify							(void);
9293 	void										resetResources					(void);
9294 };
9295 
9296 void MemoryTestInstance::resetResources (void)
9297 {
9298 	const vk::DeviceInterface&	vkd		= m_context.getDeviceInterface();
9299 	const vk::VkDevice			device	= m_context.getDevice();
9300 
9301 	VK_CHECK(vkd.deviceWaitIdle(device));
9302 
9303 	for (size_t commandNdx = 0; commandNdx < m_commands.size(); commandNdx++)
9304 	{
9305 		delete m_commands[commandNdx];
9306 		m_commands[commandNdx] = DE_NULL;
9307 	}
9308 
9309 	m_commands.clear();
9310 	m_prepareContext.clear();
9311 	m_memory.clear();
9312 }
9313 
9314 bool MemoryTestInstance::nextIteration (void)
9315 {
9316 	m_iteration++;
9317 
9318 	if (m_iteration < m_iterationCount)
9319 	{
9320 		resetResources();
9321 		m_stage = &MemoryTestInstance::createCommandsAndAllocateMemory;
9322 		return true;
9323 	}
9324 	else
9325 		return nextMemoryType();
9326 }
9327 
9328 bool MemoryTestInstance::nextMemoryType (void)
9329 {
9330 	resetResources();
9331 
9332 	DE_ASSERT(m_commands.empty());
9333 
9334 	m_memoryTypeNdx++;
9335 
9336 	if (m_memoryTypeNdx < m_memoryProperties.memoryTypeCount)
9337 	{
9338 		m_iteration	= 0;
9339 		m_stage		= &MemoryTestInstance::createCommandsAndAllocateMemory;
9340 
9341 		return true;
9342 	}
9343 	else
9344 	{
9345 		m_stage = DE_NULL;
9346 		return false;
9347 	}
9348 }
9349 
9350 MemoryTestInstance::MemoryTestInstance (::vkt::Context& context, const TestConfig& config)
9351 	: TestInstance			(context)
9352 	, m_config				(config)
9353 	, m_iterationCount		(5)
9354 	, m_opCount				(50)
9355 	, m_memoryProperties	(vk::getPhysicalDeviceMemoryProperties(context.getInstanceInterface(), context.getPhysicalDevice()))
9356 	, m_memoryTypeNdx		(0)
9357 	, m_iteration			(0)
9358 	, m_stage				(&MemoryTestInstance::createCommandsAndAllocateMemory)
9359 	, m_resultCollector		(context.getTestContext().getLog())
9360 
9361 	, m_memory				(DE_NULL)
9362 {
9363 	TestLog&	log	= context.getTestContext().getLog();
9364 	{
9365 		const tcu::ScopedLogSection section (log, "TestCaseInfo", "Test Case Info");
9366 
9367 		log << TestLog::Message << "Buffer size: " << config.size << TestLog::EndMessage;
9368 		log << TestLog::Message << "Sharing: " << config.sharing << TestLog::EndMessage;
9369 		log << TestLog::Message << "Access: " << config.usage << TestLog::EndMessage;
9370 	}
9371 
9372 	{
9373 		const tcu::ScopedLogSection section (log, "MemoryProperties", "Memory Properties");
9374 
9375 		for (deUint32 heapNdx = 0; heapNdx < m_memoryProperties.memoryHeapCount; heapNdx++)
9376 		{
9377 			const tcu::ScopedLogSection heapSection (log, "Heap" + de::toString(heapNdx), "Heap " + de::toString(heapNdx));
9378 
9379 			log << TestLog::Message << "Size: " << m_memoryProperties.memoryHeaps[heapNdx].size << TestLog::EndMessage;
9380 			log << TestLog::Message << "Flags: " << m_memoryProperties.memoryHeaps[heapNdx].flags << TestLog::EndMessage;
9381 		}
9382 
9383 		for (deUint32 memoryTypeNdx = 0; memoryTypeNdx < m_memoryProperties.memoryTypeCount; memoryTypeNdx++)
9384 		{
9385 			const tcu::ScopedLogSection memoryTypeSection (log, "MemoryType" + de::toString(memoryTypeNdx), "Memory type " + de::toString(memoryTypeNdx));
9386 
9387 			log << TestLog::Message << "Properties: " << m_memoryProperties.memoryTypes[memoryTypeNdx].propertyFlags << TestLog::EndMessage;
9388 			log << TestLog::Message << "Heap: " << m_memoryProperties.memoryTypes[memoryTypeNdx].heapIndex << TestLog::EndMessage;
9389 		}
9390 	}
9391 
9392 	{
9393 		const vk::InstanceInterface&			vki					= context.getInstanceInterface();
9394 		const vk::VkPhysicalDevice				physicalDevice		= context.getPhysicalDevice();
9395 		const vk::DeviceInterface&				vkd					= context.getDeviceInterface();
9396 		const vk::VkDevice						device				= context.getDevice();
9397 		const vk::VkQueue						queue				= context.getUniversalQueue();
9398 		const deUint32							queueFamilyIndex	= context.getUniversalQueueFamilyIndex();
9399 		vector<pair<deUint32, vk::VkQueue> >	queues;
9400 
9401 		queues.push_back(std::make_pair(queueFamilyIndex, queue));
9402 
9403 		m_renderContext = MovePtr<Context>(new Context(vki, vkd, physicalDevice, device, queue, queueFamilyIndex, queues, context.getBinaryCollection()));
9404 	}
9405 }
9406 
9407 MemoryTestInstance::~MemoryTestInstance (void)
9408 {
9409 	resetResources();
9410 }
9411 
9412 bool MemoryTestInstance::createCommandsAndAllocateMemory (void)
9413 {
9414 	const vk::VkDevice							device				= m_context.getDevice();
9415 	TestLog&									log					= m_context.getTestContext().getLog();
9416 	const vk::InstanceInterface&				vki					= m_context.getInstanceInterface();
9417 	const vk::VkPhysicalDevice					physicalDevice		= m_context.getPhysicalDevice();
9418 	const vk::DeviceInterface&					vkd					= m_context.getDeviceInterface();
9419 	const vk::VkPhysicalDeviceMemoryProperties	memoryProperties	= vk::getPhysicalDeviceMemoryProperties(vki, physicalDevice);
9420 	const tcu::ScopedLogSection					section				(log, "MemoryType" + de::toString(m_memoryTypeNdx) + "CreateCommands" + de::toString(m_iteration),
9421 																		  "Memory type " + de::toString(m_memoryTypeNdx) + " create commands iteration " + de::toString(m_iteration));
9422 	const vector<deUint32>&						queues				= m_renderContext->getQueueFamilies();
9423 
9424 	DE_ASSERT(m_commands.empty());
9425 
9426 	if (m_config.usage & (USAGE_HOST_READ | USAGE_HOST_WRITE)
9427 		&& !(memoryProperties.memoryTypes[m_memoryTypeNdx].propertyFlags & vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT))
9428 	{
9429 		log << TestLog::Message << "Memory type not supported" << TestLog::EndMessage;
9430 
9431 		return nextMemoryType();
9432 	}
9433 	else
9434 	{
9435 		try
9436 		{
9437 			const vk::VkBufferUsageFlags	bufferUsage		= usageToBufferUsageFlags(m_config.usage);
9438 			const vk::VkImageUsageFlags		imageUsage		= usageToImageUsageFlags(m_config.usage);
9439 			const vk::VkDeviceSize			maxBufferSize	= bufferUsage != 0
9440 															? roundBufferSizeToWxHx4(findMaxBufferSize(vkd, device, bufferUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx))
9441 															: 0;
9442 			const IVec2						maxImageSize	= imageUsage != 0
9443 															? findMaxRGBA8ImageSize(vkd, device, imageUsage, m_config.sharing, queues, m_config.size, m_memoryTypeNdx)
9444 															: IVec2(0, 0);
9445 
9446 			log << TestLog::Message << "Max buffer size: " << maxBufferSize << TestLog::EndMessage;
9447 			log << TestLog::Message << "Max RGBA8 image size: " << maxImageSize << TestLog::EndMessage;
9448 
9449 			// Skip tests if there are no supported operations
9450 			if (maxBufferSize == 0
9451 				&& maxImageSize[0] == 0
9452 				&& (m_config.usage & (USAGE_HOST_READ|USAGE_HOST_WRITE)) == 0)
9453 			{
9454 				log << TestLog::Message << "Skipping memory type. None of the usages are supported." << TestLog::EndMessage;
9455 
9456 				return nextMemoryType();
9457 			}
9458 			else
9459 			{
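				// Derive a distinct seed for every (iteration, memory type) combination so each run uses a different command sequence.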
9460 				const deUint32	seed	= 2830980989u ^ deUint32Hash((deUint32)(m_iteration) * m_memoryProperties.memoryTypeCount +  m_memoryTypeNdx);
9461 
9462 				m_memory	= MovePtr<Memory>(new Memory(vki, vkd, physicalDevice, device, m_config.size, m_memoryTypeNdx, maxBufferSize, maxImageSize[0], maxImageSize[1]));
9463 
9464 				log << TestLog::Message << "Create commands" << TestLog::EndMessage;
9465 				createCommands(m_commands, seed, *m_memory, m_config.usage, m_config.sharing, m_opCount);
9466 
9467 				m_stage = &MemoryTestInstance::prepare;
9468 				return true;
9469 			}
9470 		}
9471 		catch (const tcu::TestError& e)
9472 		{
9473 			m_resultCollector.fail("Failed, got exception: " + string(e.getMessage()));
9474 			return nextMemoryType();
9475 		}
9476 	}
9477 }
9478 
9479 bool MemoryTestInstance::prepare (void)
9480 {
9481 	TestLog&					log		= m_context.getTestContext().getLog();
9482 	const tcu::ScopedLogSection	section	(log, "MemoryType" + de::toString(m_memoryTypeNdx) + "Prepare" + de::toString(m_iteration),
9483 											  "Memory type " + de::toString(m_memoryTypeNdx) + " prepare iteration " + de::toString(m_iteration));
9484 
9485 	m_prepareContext = MovePtr<PrepareContext>(new PrepareContext(*m_renderContext, *m_memory));
9486 
9487 	DE_ASSERT(!m_commands.empty());
9488 
9489 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
9490 	{
9491 		Command& command = *m_commands[cmdNdx];
9492 
9493 		try
9494 		{
9495 			command.prepare(*m_prepareContext);
9496 		}
9497 		catch (const tcu::TestError& e)
9498 		{
9499 			m_resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to prepare, got exception: " + string(e.getMessage()));
9500 			return nextMemoryType();
9501 		}
9502 	}
9503 
9504 	m_stage = &MemoryTestInstance::execute;
9505 	return true;
9506 }
9507 
9508 bool MemoryTestInstance::execute (void)
9509 {
9510 	TestLog&					log				= m_context.getTestContext().getLog();
9511 	const tcu::ScopedLogSection	section			(log, "MemoryType" + de::toString(m_memoryTypeNdx) + "Execute" + de::toString(m_iteration),
9512 													  "Memory type " + de::toString(m_memoryTypeNdx) + " execute iteration " + de::toString(m_iteration));
9513 	ExecuteContext				executeContext	(*m_renderContext);
9514 	const vk::VkDevice			device			= m_context.getDevice();
9515 	const vk::DeviceInterface&	vkd				= m_context.getDeviceInterface();
9516 
9517 	DE_ASSERT(!m_commands.empty());
9518 
9519 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
9520 	{
9521 		Command& command = *m_commands[cmdNdx];
9522 
9523 		try
9524 		{
9525 			command.execute(executeContext);
9526 		}
9527 		catch (const tcu::TestError& e)
9528 		{
9529 			m_resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to execute, got exception: " + string(e.getMessage()));
9530 			return nextIteration();
9531 		}
9532 	}
9533 
9534 	VK_CHECK(vkd.deviceWaitIdle(device));
9535 
9536 	m_stage = &MemoryTestInstance::verify;
9537 	return true;
9538 }
9539 
9540 bool MemoryTestInstance::verify (void)
9541 {
9542 	DE_ASSERT(!m_commands.empty());
9543 
9544 	TestLog&					log				= m_context.getTestContext().getLog();
9545 	const tcu::ScopedLogSection	section			(log, "MemoryType" + de::toString(m_memoryTypeNdx) + "Verify" + de::toString(m_iteration),
9546 													  "Memory type " + de::toString(m_memoryTypeNdx) + " verify iteration " + de::toString(m_iteration));
9547 	VerifyContext				verifyContext	(log, m_resultCollector, *m_renderContext, m_config.size);
9548 
9549 	log << TestLog::Message << "Begin verify" << TestLog::EndMessage;
9550 
9551 	for (size_t cmdNdx = 0; cmdNdx < m_commands.size(); cmdNdx++)
9552 	{
9553 		Command& command = *m_commands[cmdNdx];
9554 
9555 		try
9556 		{
9557 			command.verify(verifyContext, cmdNdx);
9558 		}
9559 		catch (const tcu::TestError& e)
9560 		{
9561 			m_resultCollector.fail(de::toString(cmdNdx) + ":" + command.getName() + " failed to verify, got exception: " + string(e.getMessage()));
9562 			return nextIteration();
9563 		}
9564 	}
9565 
9566 	return nextIteration();
9567 }
9568 
9569 tcu::TestStatus MemoryTestInstance::iterate (void)
9570 {
9571 	if ((this->*m_stage)())
9572 		return tcu::TestStatus::incomplete();
9573 	else
9574 		return tcu::TestStatus(m_resultCollector.getResult(), m_resultCollector.getMessage());
9575 }
9576 
9577 struct AddPrograms
9578 {
9579 	void init (vk::SourceCollections& sources, TestConfig config) const
9580 	{
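		// The shaders below encode the tested memory contents either as point positions on a 256x256 grid or as pixel colors, so that the rendered output can be checked against the expected memory contents.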
9581 		// Vertex buffer rendering
9582 		if (config.usage & USAGE_VERTEX_BUFFER)
9583 		{
9584 			const char* const vertexShader =
9585 				"#version 310 es\n"
9586 				"layout(location = 0) in highp vec2 a_position;\n"
9587 				"void main (void) {\n"
9588 				"\tgl_PointSize = 1.0;\n"
9589 				"\tgl_Position = vec4(1.998 * a_position - vec2(0.999), 0.0, 1.0);\n"
9590 				"}\n";
9591 
9592 			sources.glslSources.add("vertex-buffer.vert")
9593 				<< glu::VertexSource(vertexShader);
9594 		}
9595 
9596 		// Index buffer rendering
9597 		if (config.usage & USAGE_INDEX_BUFFER)
9598 		{
9599 			const char* const vertexShader =
9600 				"#version 310 es\n"
9601 				"precision highp float;\n"
9602 				"void main (void) {\n"
9603 				"\tgl_PointSize = 1.0;\n"
9604 				"\thighp vec2 pos = vec2(gl_VertexIndex % 256, gl_VertexIndex / 256) / vec2(255.0);\n"
9605 				"\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
9606 				"}\n";
9607 
9608 			sources.glslSources.add("index-buffer.vert")
9609 				<< glu::VertexSource(vertexShader);
9610 		}
9611 
9612 		if (config.usage & USAGE_UNIFORM_BUFFER)
9613 		{
9614 			{
9615 				std::ostringstream vertexShader;
9616 
9617 				vertexShader <<
9618 					"#version 310 es\n"
9619 					"precision highp float;\n"
9620 					"layout(set=0, binding=0) uniform Block\n"
9621 					"{\n"
9622 					"\thighp uvec4 values[" << de::toString<size_t>(MAX_UNIFORM_BUFFER_SIZE / (sizeof(deUint32) * 4)) << "];\n"
9623 					"} block;\n"
9624 					"void main (void) {\n"
9625 					"\tgl_PointSize = 1.0;\n"
9626 					"\thighp uvec4 vecVal = block.values[gl_VertexIndex / 8];\n"
9627 					"\thighp uint val;\n"
9628 					"\tif (((gl_VertexIndex / 2) % 4 == 0))\n"
9629 					"\t\tval = vecVal.x;\n"
9630 					"\telse if (((gl_VertexIndex / 2) % 4 == 1))\n"
9631 					"\t\tval = vecVal.y;\n"
9632 					"\telse if (((gl_VertexIndex / 2) % 4 == 2))\n"
9633 					"\t\tval = vecVal.z;\n"
9634 					"\telse if (((gl_VertexIndex / 2) % 4 == 3))\n"
9635 					"\t\tval = vecVal.w;\n"
9636 					"\tif ((gl_VertexIndex % 2) == 0)\n"
9637 					"\t\tval = val & 0xFFFFu;\n"
9638 					"\telse\n"
9639 					"\t\tval = val >> 16u;\n"
9640 					"\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
9641 					"\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
9642 					"}\n";
9643 
9644 				sources.glslSources.add("uniform-buffer.vert")
9645 					<< glu::VertexSource(vertexShader.str());
9646 			}
9647 
9648 			{
9649 				const size_t		arraySize		= MAX_UNIFORM_BUFFER_SIZE / (sizeof(deUint32) * 4);
9650 				const size_t		arrayIntSize	= arraySize * 4;
9651 				std::ostringstream	fragmentShader;
9652 
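				// The fragment shader chases values through the uniform block: each fetched value selects the next index, so the output color depends on a chain of buffer elements.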
9653 				fragmentShader <<
9654 					"#version 310 es\n"
9655 					"precision highp float;\n"
9656 					"precision highp int;\n"
9657 					"layout(location = 0) out highp vec4 o_color;\n"
9658 					"layout(set=0, binding=0) uniform Block\n"
9659 					"{\n"
9660 					"\thighp uvec4 values[" << arraySize << "];\n"
9661 					"} block;\n"
9662 					"layout(push_constant) uniform PushC\n"
9663 					"{\n"
9664 					"\tuint callId;\n"
9665 					"\tuint valuesPerPixel;\n"
9666 					"} pushC;\n"
9667 					"void main (void) {\n"
9668 					"\thighp uint id = pushC.callId * (" << arrayIntSize << "u / pushC.valuesPerPixel) + uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x);\n"
9669 					"\tif (uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x) < pushC.callId * (" << arrayIntSize  << "u / pushC.valuesPerPixel))\n"
9670 					"\t\tdiscard;\n"
9671 					"\thighp uint value = id;\n"
9672 					"\tfor (uint i = 0u; i < pushC.valuesPerPixel; i++)\n"
9673 					"\t{\n"
9674 					"\t\thighp uvec4 vecVal = block.values[(value / 4u) % " << arraySize << "u];\n"
9675 					"\t\tif ((value % 4u) == 0u)\n"
9676 					"\t\t\tvalue = vecVal.x;\n"
9677 					"\t\telse if ((value % 4u) == 1u)\n"
9678 					"\t\t\tvalue = vecVal.y;\n"
9679 					"\t\telse if ((value % 4u) == 2u)\n"
9680 					"\t\t\tvalue = vecVal.z;\n"
9681 					"\t\telse if ((value % 4u) == 3u)\n"
9682 					"\t\t\tvalue = vecVal.w;\n"
9683 					"\t}\n"
9684 					"\tuvec4 valueOut = uvec4(value & 0xFFu, (value >> 8u) & 0xFFu, (value >> 16u) & 0xFFu, (value >> 24u) & 0xFFu);\n"
9685 					"\to_color = vec4(valueOut) / vec4(255.0);\n"
9686 					"}\n";
9687 
9688 				sources.glslSources.add("uniform-buffer.frag")
9689 					<< glu::FragmentSource(fragmentShader.str());
9690 			}
9691 		}
9692 
9693 		if (config.usage & USAGE_STORAGE_BUFFER)
9694 		{
9695 			{
9696 				// Vertex storage buffer rendering
9697 				const char* const vertexShader =
9698 					"#version 310 es\n"
9699 					"precision highp float;\n"
9700 					"readonly layout(set=0, binding=0) buffer Block\n"
9701 					"{\n"
9702 					"\thighp uvec4 values[];\n"
9703 					"} block;\n"
9704 					"void main (void) {\n"
9705 					"\tgl_PointSize = 1.0;\n"
9706 					"\thighp uvec4 vecVal = block.values[gl_VertexIndex / 8];\n"
9707 					"\thighp uint val;\n"
9708 					"\tif (((gl_VertexIndex / 2) % 4 == 0))\n"
9709 					"\t\tval = vecVal.x;\n"
9710 					"\telse if (((gl_VertexIndex / 2) % 4 == 1))\n"
9711 					"\t\tval = vecVal.y;\n"
9712 					"\telse if (((gl_VertexIndex / 2) % 4 == 2))\n"
9713 					"\t\tval = vecVal.z;\n"
9714 					"\telse if (((gl_VertexIndex / 2) % 4 == 3))\n"
9715 					"\t\tval = vecVal.w;\n"
9716 					"\tif ((gl_VertexIndex % 2) == 0)\n"
9717 					"\t\tval = val & 0xFFFFu;\n"
9718 					"\telse\n"
9719 					"\t\tval = val >> 16u;\n"
9720 					"\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
9721 					"\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
9722 					"}\n";
9723 
9724 				sources.glslSources.add("storage-buffer.vert")
9725 					<< glu::VertexSource(vertexShader);
9726 			}
9727 
9728 			{
9729 				std::ostringstream	fragmentShader;
9730 
9731 				fragmentShader <<
9732 					"#version 310 es\n"
9733 					"precision highp float;\n"
9734 					"precision highp int;\n"
9735 					"layout(location = 0) out highp vec4 o_color;\n"
9736 					"layout(set=0, binding=0) buffer Block\n"
9737 					"{\n"
9738 					"\thighp uvec4 values[];\n"
9739 					"} block;\n"
9740 					"layout(push_constant) uniform PushC\n"
9741 					"{\n"
9742 					"\tuint valuesPerPixel;\n"
9743 					"\tuint bufferSize;\n"
9744 					"} pushC;\n"
9745 					"void main (void) {\n"
9746 					"\thighp uint arrayIntSize = pushC.bufferSize / 4u;\n"
9747 					"\thighp uint id = uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x);\n"
9748 					"\thighp uint value = id;\n"
9749 					"\tfor (uint i = 0u; i < pushC.valuesPerPixel; i++)\n"
9750 					"\t{\n"
9751 					"\t\thighp uvec4 vecVal = block.values[(value / 4u) % (arrayIntSize / 4u)];\n"
9752 					"\t\tif ((value % 4u) == 0u)\n"
9753 					"\t\t\tvalue = vecVal.x;\n"
9754 					"\t\telse if ((value % 4u) == 1u)\n"
9755 					"\t\t\tvalue = vecVal.y;\n"
9756 					"\t\telse if ((value % 4u) == 2u)\n"
9757 					"\t\t\tvalue = vecVal.z;\n"
9758 					"\t\telse if ((value % 4u) == 3u)\n"
9759 					"\t\t\tvalue = vecVal.w;\n"
9760 					"\t}\n"
9761 					"\tuvec4 valueOut = uvec4(value & 0xFFu, (value >> 8u) & 0xFFu, (value >> 16u) & 0xFFu, (value >> 24u) & 0xFFu);\n"
9762 					"\to_color = vec4(valueOut) / vec4(255.0);\n"
9763 					"}\n";
9764 
9765 				sources.glslSources.add("storage-buffer.frag")
9766 					<< glu::FragmentSource(fragmentShader.str());
9767 			}
9768 		}
9769 
9770 		if (config.usage & USAGE_UNIFORM_TEXEL_BUFFER)
9771 		{
9772 			{
9773 				// Vertex uniform texel buffer rendering
9774 				const char* const vertexShader =
9775 					"#version 310 es\n"
9776 					"#extension GL_EXT_texture_buffer : require\n"
					"#extension GL_EXT_samplerless_texture_functions : require\n"
9777 					"precision highp float;\n"
9778 					"layout(set=0, binding=0) uniform highp utextureBuffer u_sampler;\n"
9779 					"void main (void) {\n"
9780 					"\tgl_PointSize = 1.0;\n"
9781 					"\thighp uint val = texelFetch(u_sampler, gl_VertexIndex).x;\n"
9782 					"\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
9783 					"\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
9784 					"}\n";
9785 
9786 				sources.glslSources.add("uniform-texel-buffer.vert")
9787 					<< glu::VertexSource(vertexShader);
9788 			}
9789 
9790 			{
9791 				// Fragment uniform texel buffer rendering
9792 				const char* const fragmentShader =
9793 					"#version 310 es\n"
9794 					"#extension GL_EXT_texture_buffer : require\n"
9795 					"#extension GL_EXT_samplerless_texture_functions : require\n"
9796 					"precision highp float;\n"
9797 					"precision highp int;\n"
9798 					"layout(set=0, binding=0) uniform highp utextureBuffer u_sampler;\n"
9799 					"layout(location = 0) out highp vec4 o_color;\n"
9800 					"layout(push_constant) uniform PushC\n"
9801 					"{\n"
9802 					"\tuint callId;\n"
9803 					"\tuint valuesPerPixel;\n"
9804 					"\tuint maxTexelCount;\n"
9805 					"} pushC;\n"
9806 					"void main (void) {\n"
9807 					"\thighp uint id = uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x);\n"
9808 					"\thighp uint value = id;\n"
9809 					"\tif (uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x) < pushC.callId * (pushC.maxTexelCount / pushC.valuesPerPixel))\n"
9810 					"\t\tdiscard;\n"
9811 					"\tfor (uint i = 0u; i < pushC.valuesPerPixel; i++)\n"
9812 					"\t{\n"
9813 					"\t\tvalue = texelFetch(u_sampler, int(value % uint(textureSize(u_sampler)))).x;\n"
9814 					"\t}\n"
9815 					"\tuvec4 valueOut = uvec4(value & 0xFFu, (value >> 8u) & 0xFFu, (value >> 16u) & 0xFFu, (value >> 24u) & 0xFFu);\n"
9816 					"\to_color = vec4(valueOut) / vec4(255.0);\n"
9817 					"}\n";
9818 
9819 				sources.glslSources.add("uniform-texel-buffer.frag")
9820 					<< glu::FragmentSource(fragmentShader);
9821 			}
9822 		}
9823 
9824 		if (config.usage & USAGE_STORAGE_TEXEL_BUFFER)
9825 		{
9826 			{
9827 				// Vertex storage texel buffer rendering
9828 				const char* const vertexShader =
9829 					"#version 450\n"
9830 					"#extension GL_EXT_texture_buffer : require\n"
9831 					"precision highp float;\n"
9832 					"layout(set=0, binding=0, r32ui) uniform readonly highp uimageBuffer u_sampler;\n"
9833 					"out gl_PerVertex {\n"
9834 					"\tvec4 gl_Position;\n"
9835 					"\tfloat gl_PointSize;\n"
9836 					"};\n"
9837 					"void main (void) {\n"
9838 					"\tgl_PointSize = 1.0;\n"
9839 					"\thighp uint val = imageLoad(u_sampler, gl_VertexIndex / 2).x;\n"
9840 					"\tif (gl_VertexIndex % 2 == 0)\n"
9841 					"\t\tval = val & 0xFFFFu;\n"
9842 					"\telse\n"
9843 					"\t\tval = val >> 16;\n"
9844 					"\thighp vec2 pos = vec2(val & 0xFFu, val >> 8u) / vec2(255.0);\n"
9845 					"\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
9846 					"}\n";
9847 
9848 				sources.glslSources.add("storage-texel-buffer.vert")
9849 					<< glu::VertexSource(vertexShader);
9850 			}
9851 			{
9852 				// Fragment storage texel buffer rendering
9853 				const char* const fragmentShader =
9854 					"#version 310 es\n"
9855 					"#extension GL_EXT_texture_buffer : require\n"
9856 					"precision highp float;\n"
9857 					"precision highp int;\n"
9858 					"layout(set=0, binding=0, r32ui) uniform readonly highp uimageBuffer u_sampler;\n"
9859 					"layout(location = 0) out highp vec4 o_color;\n"
9860 					"layout(push_constant) uniform PushC\n"
9861 					"{\n"
9862 					"\tuint callId;\n"
9863 					"\tuint valuesPerPixel;\n"
9864 					"\tuint maxTexelCount;\n"
9865 					"\tuint width;\n"
9866 					"} pushC;\n"
9867 					"void main (void) {\n"
9868 					"\thighp uint id = uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x);\n"
9869 					"\thighp uint value = id;\n"
9870 					"\tif (uint(gl_FragCoord.y) * 256u + uint(gl_FragCoord.x) < pushC.callId * (pushC.maxTexelCount / pushC.valuesPerPixel))\n"
9871 					"\t\tdiscard;\n"
9872 					"\tfor (uint i = 0u; i < pushC.valuesPerPixel; i++)\n"
9873 					"\t{\n"
9874 					"\t\tvalue = imageLoad(u_sampler, int(value % pushC.width)).x;\n"
9875 					"\t}\n"
9876 					"\tuvec4 valueOut = uvec4(value & 0xFFu, (value >> 8u) & 0xFFu, (value >> 16u) & 0xFFu, (value >> 24u) & 0xFFu);\n"
9877 					"\to_color = vec4(valueOut) / vec4(255.0);\n"
9878 					"}\n";
9879 
9880 				sources.glslSources.add("storage-texel-buffer.frag")
9881 					<< glu::FragmentSource(fragmentShader);
9882 			}
9883 		}
9884 
9885 		if (config.usage & USAGE_STORAGE_IMAGE)
9886 		{
9887 			{
9888 				// Vertex storage image
9889 				const char* const vertexShader =
9890 					"#version 450\n"
9891 					"precision highp float;\n"
9892 					"layout(set=0, binding=0, rgba8) uniform image2D u_image;\n"
9893 					"out gl_PerVertex {\n"
9894 					"\tvec4 gl_Position;\n"
9895 					"\tfloat gl_PointSize;\n"
9896 					"};\n"
9897 					"void main (void) {\n"
9898 					"\tgl_PointSize = 1.0;\n"
9899 					"\thighp vec4 val = imageLoad(u_image, ivec2((gl_VertexIndex / 2) / imageSize(u_image).x, (gl_VertexIndex / 2) % imageSize(u_image).x));\n"
9900 					"\thighp vec2 pos;\n"
9901 					"\tif (gl_VertexIndex % 2 == 0)\n"
9902 					"\t\tpos = val.xy;\n"
9903 					"\telse\n"
9904 					"\t\tpos = val.zw;\n"
9905 					"\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
9906 					"}\n";
9907 
9908 				sources.glslSources.add("storage-image.vert")
9909 					<< glu::VertexSource(vertexShader);
9910 			}
9911 			{
9912 				// Fragment storage image
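				// Performs a data-dependent walk over the image texels; the final color depends on the whole chain of visited texels.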
9913 				const char* const fragmentShader =
9914 					"#version 450\n"
9915 					"#extension GL_EXT_texture_buffer : require\n"
9916 					"precision highp float;\n"
9917 					"layout(set=0, binding=0, rgba8) uniform image2D u_image;\n"
9918 					"layout(location = 0) out highp vec4 o_color;\n"
9919 					"void main (void) {\n"
9920 					"\thighp uvec2 size = uvec2(imageSize(u_image).x, imageSize(u_image).y);\n"
9921 					"\thighp uint valuesPerPixel = max(1u, (size.x * size.y) / (256u * 256u));\n"
9922 					"\thighp uvec4 value = uvec4(uint(gl_FragCoord.x), uint(gl_FragCoord.y), 0u, 0u);\n"
9923 					"\tfor (uint i = 0u; i < valuesPerPixel; i++)\n"
9924 					"\t{\n"
9925 					"\t\thighp vec4 floatValue = imageLoad(u_image, ivec2(int((value.z *  256u + (value.x ^ value.z)) % size.x), int((value.w * 256u + (value.y ^ value.w)) % size.y)));\n"
9926 					"\t\tvalue = uvec4(uint(floatValue.x * 255.0), uint(floatValue.y * 255.0), uint(floatValue.z * 255.0), uint(floatValue.w * 255.0));\n"
9927 					"\t}\n"
9928 					"\to_color = vec4(value) / vec4(255.0);\n"
9929 					"}\n";
9930 
9931 				sources.glslSources.add("storage-image.frag")
9932 					<< glu::FragmentSource(fragmentShader);
9933 			}
9934 		}
9935 
9936 		if (config.usage & USAGE_SAMPLED_IMAGE)
9937 		{
9938 			{
9939 				// Vertex sampled image
9940 				const char* const vertexShader =
9941 					"#version 450\n"
9942 					"precision highp float;\n"
9943 					"layout(set=0, binding=0) uniform sampler2D u_sampler;\n"
9944 					"out gl_PerVertex {\n"
9945 					"\tvec4 gl_Position;\n"
9946 					"\tfloat gl_PointSize;\n"
9947 					"};\n"
9948 					"void main (void) {\n"
9949 					"\tgl_PointSize = 1.0;\n"
9950 					"\thighp vec4 val = texelFetch(u_sampler, ivec2((gl_VertexIndex / 2) / textureSize(u_sampler, 0).x, (gl_VertexIndex / 2) % textureSize(u_sampler, 0).x), 0);\n"
9951 					"\thighp vec2 pos;\n"
9952 					"\tif (gl_VertexIndex % 2 == 0)\n"
9953 					"\t\tpos = val.xy;\n"
9954 					"\telse\n"
9955 					"\t\tpos = val.zw;\n"
9956 					"\tgl_Position = vec4(1.998 * pos - vec2(0.999), 0.0, 1.0);\n"
9957 					"}\n";
9958 
9959 				sources.glslSources.add("sampled-image.vert")
9960 					<< glu::VertexSource(vertexShader);
9961 			}
9962 			{
9963 				// Fragment sampled image
9964 				const char* const fragmentShader =
9965 					"#version 450\n"
9966 					"#extension GL_EXT_texture_buffer : require\n"
9967 					"precision highp float;\n"
9968 					"layout(set=0, binding=0) uniform sampler2D u_sampler;\n"
9969 					"layout(location = 0) out highp vec4 o_color;\n"
9970 					"void main (void) {\n"
9971 					"\thighp uvec2 size = uvec2(textureSize(u_sampler, 0).x, textureSize(u_sampler, 0).y);\n"
9972 					"\thighp uint valuesPerPixel = max(1u, (size.x * size.y) / (256u * 256u));\n"
9973 					"\thighp uvec4 value = uvec4(uint(gl_FragCoord.x), uint(gl_FragCoord.y), 0u, 0u);\n"
9974 					"\tfor (uint i = 0u; i < valuesPerPixel; i++)\n"
9975 					"\t{\n"
9976 					"\t\thighp vec4 floatValue = texelFetch(u_sampler, ivec2(int((value.z *  256u + (value.x ^ value.z)) % size.x), int((value.w * 256u + (value.y ^ value.w)) % size.y)), 0);\n"
9977 					"\t\tvalue = uvec4(uint(floatValue.x * 255.0), uint(floatValue.y * 255.0), uint(floatValue.z * 255.0), uint(floatValue.w * 255.0));\n"
9978 					"\t}\n"
9979 					"\to_color = vec4(value) / vec4(255.0);\n"
9980 					"}\n";
9981 
9982 				sources.glslSources.add("sampled-image.frag")
9983 					<< glu::FragmentSource(fragmentShader);
9984 			}
9985 		}
9986 
9987 		{
9988 			const char* const vertexShader =
9989 				"#version 450\n"
9990 				"out gl_PerVertex {\n"
9991 				"\tvec4 gl_Position;\n"
9992 				"};\n"
9993 				"precision highp float;\n"
9994 				"void main (void) {\n"
9995 				"\tgl_Position = vec4(((gl_VertexIndex + 2) / 3) % 2 == 0 ? -1.0 : 1.0,\n"
9996 				"\t                   ((gl_VertexIndex + 1) / 3) % 2 == 0 ? -1.0 : 1.0, 0.0, 1.0);\n"
9997 				"}\n";
9998 
9999 			sources.glslSources.add("render-quad.vert")
10000 				<< glu::VertexSource(vertexShader);
10001 		}
10002 
10003 		{
10004 			const char* const fragmentShader =
10005 				"#version 310 es\n"
10006 				"layout(location = 0) out highp vec4 o_color;\n"
10007 				"void main (void) {\n"
10008 				"\to_color = vec4(1.0);\n"
10009 				"}\n";
10010 
10011 			sources.glslSources.add("render-white.frag")
10012 				<< glu::FragmentSource(fragmentShader);
10013 		}
10014 	}
10015 };
10016 
10017 } // anonymous
10018 
10019 tcu::TestCaseGroup* createPipelineBarrierTests (tcu::TestContext& testCtx)
10020 {
10021 	de::MovePtr<tcu::TestCaseGroup>	group			(new tcu::TestCaseGroup(testCtx, "pipeline_barrier", "Pipeline barrier tests."));
10022 	const vk::VkDeviceSize			sizes[]			=
10023 	{
10024 		1024,			// 1K
10025 		8*1024,			// 8K
10026 		64*1024,		// 64K
10027 		ONE_MEGABYTE,	// 1M
10028 	};
10029 	const Usage						usages[]		=
10030 	{
10031 		USAGE_HOST_READ,
10032 		USAGE_HOST_WRITE,
10033 		USAGE_TRANSFER_SRC,
10034 		USAGE_TRANSFER_DST,
10035 		USAGE_VERTEX_BUFFER,
10036 		USAGE_INDEX_BUFFER,
10037 		USAGE_UNIFORM_BUFFER,
10038 		USAGE_UNIFORM_TEXEL_BUFFER,
10039 		USAGE_STORAGE_BUFFER,
10040 		USAGE_STORAGE_TEXEL_BUFFER,
10041 		USAGE_STORAGE_IMAGE,
10042 		USAGE_SAMPLED_IMAGE
10043 	};
10044 	const Usage						readUsages[]		=
10045 	{
10046 		USAGE_HOST_READ,
10047 		USAGE_TRANSFER_SRC,
10048 		USAGE_VERTEX_BUFFER,
10049 		USAGE_INDEX_BUFFER,
10050 		USAGE_UNIFORM_BUFFER,
10051 		USAGE_UNIFORM_TEXEL_BUFFER,
10052 		USAGE_STORAGE_BUFFER,
10053 		USAGE_STORAGE_TEXEL_BUFFER,
10054 		USAGE_STORAGE_IMAGE,
10055 		USAGE_SAMPLED_IMAGE
10056 	};
10057 
10058 	const Usage						writeUsages[]	=
10059 	{
10060 		USAGE_HOST_WRITE,
10061 		USAGE_TRANSFER_DST
10062 	};
10063 
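	// Create one test group per (write usage, read usage) pair; combined "all" and "all_device" groups are added below.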
10064 	for (size_t writeUsageNdx = 0; writeUsageNdx < DE_LENGTH_OF_ARRAY(writeUsages); writeUsageNdx++)
10065 	{
10066 		const Usage	writeUsage	= writeUsages[writeUsageNdx];
10067 
10068 		for (size_t readUsageNdx = 0; readUsageNdx < DE_LENGTH_OF_ARRAY(readUsages); readUsageNdx++)
10069 		{
10070 			const Usage						readUsage		= readUsages[readUsageNdx];
10071 			const Usage						usage			= writeUsage | readUsage;
10072 			const string					usageGroupName	(usageToName(usage));
10073 			de::MovePtr<tcu::TestCaseGroup>	usageGroup		(new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
10074 
10075 			for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
10076 			{
10077 				const vk::VkDeviceSize	size		= sizes[sizeNdx];
10078 				const string			testName	(de::toString((deUint64)(size)));
10079 				const TestConfig		config		=
10080 				{
10081 					usage,
10082 					size,
10083 					vk::VK_SHARING_MODE_EXCLUSIVE
10084 				};
10085 
10086 				usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx, tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
10087 			}
10088 
10089 			group->addChild(usageGroup.get());
10090 			usageGroup.release();
10091 		}
10092 	}
10093 
10094 	{
10095 		Usage all = (Usage)0;
10096 
10097 		for (size_t usageNdx = 0; usageNdx < DE_LENGTH_OF_ARRAY(usages); usageNdx++)
10098 			all = all | usages[usageNdx];
10099 
10100 		{
10101 			const string					usageGroupName	("all");
10102 			de::MovePtr<tcu::TestCaseGroup>	usageGroup		(new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
10103 
10104 			for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
10105 			{
10106 				const vk::VkDeviceSize	size		= sizes[sizeNdx];
10107 				const string			testName	(de::toString((deUint64)(size)));
10108 				const TestConfig		config		=
10109 				{
10110 					all,
10111 					size,
10112 					vk::VK_SHARING_MODE_EXCLUSIVE
10113 				};
10114 
10115 				usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx, tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
10116 			}
10117 
10118 			group->addChild(usageGroup.get());
10119 			usageGroup.release();
10120 		}
10121 
10122 		{
10123 			const string					usageGroupName	("all_device");
10124 			de::MovePtr<tcu::TestCaseGroup>	usageGroup		(new tcu::TestCaseGroup(testCtx, usageGroupName.c_str(), usageGroupName.c_str()));
10125 
10126 			for (size_t sizeNdx = 0; sizeNdx < DE_LENGTH_OF_ARRAY(sizes); sizeNdx++)
10127 			{
10128 				const vk::VkDeviceSize	size		= sizes[sizeNdx];
10129 				const string			testName	(de::toString((deUint64)(size)));
10130 				const TestConfig		config		=
10131 				{
10132 					(Usage)(all & (~(USAGE_HOST_READ|USAGE_HOST_WRITE))),
10133 					size,
10134 					vk::VK_SHARING_MODE_EXCLUSIVE
10135 				};
10136 
10137 				usageGroup->addChild(new InstanceFactory1<MemoryTestInstance, TestConfig, AddPrograms>(testCtx, tcu::NODETYPE_SELF_VALIDATE, testName, testName, AddPrograms(), config));
10138 			}
10139 
10140 			group->addChild(usageGroup.get());
10141 			usageGroup.release();
10142 		}
10143 	}
10144 
10145 	return group.release();
10146 }
10147 
10148 } // memory
10149 } // vkt
10150