1 /*------------------------------------------------------------------------
2 * Vulkan Conformance Tests
3 * ------------------------
4 *
5 * Copyright (c) 2017 The Khronos Group Inc.
6 *
7 * Licensed under the Apache License, Version 2.0 (the "License");
8 * you may not use this file except in compliance with the License.
9 * You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing, software
14 * distributed under the License is distributed on an "AS IS" BASIS,
15 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
16 * See the License for the specific language governing permissions and
17 * limitations under the License.
18 *
19 *//*!
20 * \file vktImageCompressionTranscodingSupport.cpp
21 * \brief Compression transcoding support
22 *//*--------------------------------------------------------------------*/
23
24 #include "vktImageCompressionTranscodingSupport.hpp"
25 #include "vktImageLoadStoreUtil.hpp"
26
27 #include "deUniquePtr.hpp"
28 #include "deStringUtil.hpp"
29 #include "deSharedPtr.hpp"
30 #include "deRandom.hpp"
31
32 #include "vktTestCaseUtil.hpp"
33 #include "vkPrograms.hpp"
34 #include "vkImageUtil.hpp"
35 #include "vkBarrierUtil.hpp"
36 #include "vktImageTestsUtil.hpp"
37 #include "vkBuilderUtil.hpp"
38 #include "vkRef.hpp"
39 #include "vkRefUtil.hpp"
40 #include "vkTypeUtil.hpp"
41 #include "vkQueryUtil.hpp"
42 #include "vkCmdUtil.hpp"
43
44 #include "tcuTextureUtil.hpp"
45 #include "tcuTexture.hpp"
46 #include "tcuCompressedTexture.hpp"
47 #include "tcuVectorType.hpp"
48 #include "tcuResource.hpp"
49 #include "tcuImageIO.hpp"
50 #include "tcuImageCompare.hpp"
51 #include "tcuTestLog.hpp"
52 #include "tcuRGBA.hpp"
53 #include "tcuSurface.hpp"
54
55 #include <vector>
56
using namespace vk;
namespace vkt
{
namespace image
{
namespace
{
// Convenience aliases for frequently used framework types.
using std::string;
using std::vector;
using tcu::TestContext;
using tcu::TestStatus;
using tcu::UVec3;
using tcu::IVec3;
using tcu::CompressedTexFormat;
using tcu::CompressedTexture;
using tcu::Resource;
using tcu::Archive;
using tcu::ConstPixelBufferAccess;
using de::MovePtr;
using de::SharedPtr;
using de::Random;

// Shared-ownership wrappers so move-only Vulkan objects can be stored in STL containers.
typedef SharedPtr<MovePtr<Image> >			ImageSp;
typedef SharedPtr<Move<VkImageView> >		ImageViewSp;
typedef SharedPtr<Move<VkDescriptorSet> >	SharedVkDescriptorSet;
82
// Pipeline stage in which the transcoding operation under test is performed.
enum ShaderType
{
	SHADER_TYPE_COMPUTE,	// Operation executed in a compute shader
	SHADER_TYPE_FRAGMENT,	// Operation executed in a fragment shader
	SHADER_TYPE_LAST
};
89
// How the shader accesses the image data being transcoded.
enum Operation
{
	OPERATION_IMAGE_LOAD,		// imageLoad() from a storage image
	OPERATION_TEXEL_FETCH,		// texelFetch() from a sampled image
	OPERATION_TEXTURE,			// texture() sampling from a sampled image
	OPERATION_IMAGE_STORE,		// imageStore() to a storage image
	OPERATION_ATTACHMENT_READ,	// Read through a framebuffer attachment
	OPERATION_ATTACHMENT_WRITE,	// Write through a framebuffer attachment
	OPERATION_TEXTURE_READ,		// Sampled read in the graphics pipeline
	OPERATION_TEXTURE_WRITE,	// Write path exercised via texture operations
	OPERATION_LAST
};
102
// Bundle of settings fully describing one transcoding test case.
struct TestParameters
{
	Operation			operation;					// How the shader accesses the image
	ShaderType			shader;						// Stage performing the operation
	UVec3				size;						// Base (level 0) image size in texels
	deUint32			layers;						// Number of array layers
	ImageType			imageType;
	VkFormat			formatCompressed;			// Block-compressed format of the image
	VkFormat			formatUncompressed;			// Uncompressed format used for the transcoding view
	deUint32			imagesCount;				// Number of images taking part in the operation
	VkImageUsageFlags	compressedImageUsage;
	VkImageUsageFlags	compressedImageViewUsage;
	VkImageUsageFlags	uncompressedImageUsage;
	bool				useMipmaps;					// Exercise a full mip chain instead of a single level
	VkFormat			formatForVerify;			// Format used when rendering both versions for comparison
	bool				formatIsASTC;				// True when formatCompressed is an ASTC format
};
120
121 template<typename T>
makeVkSharedPtr(Move<T> move)122 inline SharedPtr<Move<T> > makeVkSharedPtr (Move<T> move)
123 {
124 return SharedPtr<Move<T> >(new Move<T>(move));
125 }
126
127 template<typename T>
makeVkSharedPtr(MovePtr<T> movePtr)128 inline SharedPtr<MovePtr<T> > makeVkSharedPtr (MovePtr<T> movePtr)
129 {
130 return SharedPtr<MovePtr<T> >(new MovePtr<T>(movePtr));
131 }
132
// Defaults used when a test does not exercise mipmaps or array layers.
const deUint32 SINGLE_LEVEL = 1u;
const deUint32 SINGLE_LAYER = 1u;

// Controls whether BinaryCompare() tolerates ASTC LDR/HDR error-colour
// mismatches (reported as a quality warning rather than a failure).
enum BinaryCompareMode
{
	COMPARE_MODE_NORMAL,
	COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING,
};

enum BinaryCompareResult
{
	COMPARE_RESULT_OK,
	COMPARE_RESULT_ASTC_QUALITY_WARNING,
	COMPARE_RESULT_FAILED,
};

// RGBA8 error colours emitted by ASTC decoders on invalid blocks:
// LDR-profile decoders emit opaque magenta, HDR-profile decoders emit all zeros.
const deUint32 ASTC_LDR_ERROR_COLOUR = 0xFFFF00FF;
const deUint32 ASTC_HDR_ERROR_COLOUR = 0x00000000;
151
BinaryCompare(const void * reference,const void * result,VkDeviceSize sizeInBytes,VkFormat formatForVerify,BinaryCompareMode mode)152 static BinaryCompareResult BinaryCompare(const void *reference,
153 const void *result,
154 VkDeviceSize sizeInBytes,
155 VkFormat formatForVerify,
156 BinaryCompareMode mode)
157 {
158 DE_UNREF(formatForVerify);
159
160 // Compare quickly using deMemCmp
161 if (deMemCmp(reference, result, (size_t)sizeInBytes) == 0)
162 {
163 return COMPARE_RESULT_OK;
164 }
165 // If deMemCmp indicated a mismatch, we can re-check with a manual comparison of
166 // the ref and res images that allows for ASTC error colour mismatches if the ASTC
167 // comparison mode was selected. This slows down the affected ASTC tests if you
168 // didn't pass in the first comparison, but means in the general case the
169 // comparion is still fast.
170 else if (mode == COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
171 {
172 bool bWarn = false;
173 bool bFail = false;
174 const deUint32 *pui32RefVal = (deUint32*)reference;
175 const deUint32 *pui32ResVal = (deUint32*)result;
176
177 DE_ASSERT(formatForVerify == VK_FORMAT_R8G8B8A8_UNORM);
178 size_t numPixels = (size_t)(sizeInBytes / 4) /* bytes */;
179 for (size_t i = 0; i < numPixels; i++)
180 {
181 const deUint32 ref = *pui32RefVal++;
182 const deUint32 res = *pui32ResVal++;
183
184 if (ref != res)
185 {
186 // QualityWarning !1231: If the astc pixel was the ASTC LDR error colour
187 // and the result image has the HDR error colour (or vice versa as the test
188 // cases below sometimes reverse the operands) then issue a quality warning
189 // instead of a failure.
190 if ((ref == ASTC_LDR_ERROR_COLOUR && res == ASTC_HDR_ERROR_COLOUR) ||
191 (ref == ASTC_HDR_ERROR_COLOUR && res == ASTC_LDR_ERROR_COLOUR))
192 {
193 bWarn = true;
194 }
195 else
196 {
197 bFail = true;
198 }
199 }
200 }
201
202 if (!bFail)
203 {
204 return (bWarn)
205 ? (COMPARE_RESULT_ASTC_QUALITY_WARNING)
206 : (COMPARE_RESULT_OK);
207 }
208 }
209
210 return COMPARE_RESULT_FAILED;
211 }
212
FormatIsASTC(VkFormat format)213 static bool FormatIsASTC(VkFormat format)
214 {
215 return deInRange32(format, VK_FORMAT_ASTC_4x4_UNORM_BLOCK, VK_FORMAT_ASTC_12x12_SRGB_BLOCK);
216 }
217
TestStatusASTCQualityWarning()218 static TestStatus TestStatusASTCQualityWarning()
219 {
220 return TestStatus(QP_TEST_RESULT_QUALITY_WARNING, "ASTC HDR error colour output instead of LDR error colour");
221 }
222
// Base class for the transcoding tests: stores the test parameters and provides
// helpers for generating input data and computing mip-level/layer geometry.
class BasicTranscodingTestInstance : public TestInstance
{
public:
							BasicTranscodingTestInstance	(Context&				context,
															 const TestParameters&	parameters);
	virtual TestStatus		iterate							(void) = 0;
protected:
	// Fills 'toFill' with a fixed edge-case pattern (layer 0 / level 0 only)
	// followed by seeded pseudo-random data, then patches values that the
	// uncompressed view format cannot round-trip (SNORM -0, float INF/NaN/denorm).
	void					generateData					(deUint8*		toFill,
															 const size_t	size,
															 const VkFormat	format,
															 const deUint32	layer = 0u,
															 const deUint32	level = 0u);
	deUint32				getLevelCount					();
	deUint32				getLayerCount					();
	UVec3					getLayerDims					();
	vector<UVec3>			getMipLevelSizes				(UVec3 baseSize);
	vector<UVec3>			getCompressedMipLevelSizes		(const VkFormat			compressedFormat,
															 const vector<UVec3>&	uncompressedSizes);

	const TestParameters	m_parameters;
	const deUint32			m_blockWidth;		// Compressed block width in texels
	const deUint32			m_blockHeight;		// Compressed block height in texels
	const deUint32			m_levelCount;		// Usable mip level count (see findMipMapLevelCount)
	const UVec3				m_layerSize;		// Size of a single layer

	// Detected error colour mismatch while verifying image. Output
	// the ASTC quality warning instead of a pass
	bool					m_bASTCErrorColourMismatch;

private:
	deUint32				findMipMapLevelCount			();
};
255
// Computes how many mip levels can be used for the test.
// Levels whose resolution would drop to (or below) the compressed block size are
// excluded, so every used level spans at least one full block in each dimension.
deUint32 BasicTranscodingTestInstance::findMipMapLevelCount ()
{
	deUint32 levelCount = 1;

	// We cannot use mipmap levels which have resolution below block size.
	// Reduce number of mipmap levels
	if (m_parameters.useMipmaps)
	{
		deUint32 w = m_parameters.size.x();
		deUint32 h = m_parameters.size.y();

		DE_ASSERT(m_blockWidth > 0u && m_blockHeight > 0u);

		// Count how many times the base size can be halved while the halved
		// size still strictly exceeds the block size in both dimensions.
		while (w > m_blockWidth && h > m_blockHeight)
		{
			w >>= 1;
			h >>= 1;

			if (w > m_blockWidth && h > m_blockHeight)
				levelCount++;
		}

		// Sanity: the smallest used level is still at least one block wide/high.
		DE_ASSERT((m_parameters.size.x() >> (levelCount - 1u)) >= m_blockWidth);
		DE_ASSERT((m_parameters.size.y() >> (levelCount - 1u)) >= m_blockHeight);
	}

	return levelCount;
}
284
// Caches block geometry, usable mip level count and per-layer size up front.
// Note: findMipMapLevelCount() depends on m_blockWidth/m_blockHeight, which are
// initialized before it in the member order above.
BasicTranscodingTestInstance::BasicTranscodingTestInstance (Context& context, const TestParameters& parameters)
	: TestInstance	(context)
	, m_parameters	(parameters)
	, m_blockWidth	(getBlockWidth(m_parameters.formatCompressed))
	, m_blockHeight	(getBlockHeight(m_parameters.formatCompressed))
	, m_levelCount	(findMipMapLevelCount())
	, m_layerSize	(getLayerSize(m_parameters.imageType, m_parameters.size))
	, m_bASTCErrorColourMismatch(false)
{
	// The mip-level computation assumes a square-ish base size (same log2 extent).
	DE_ASSERT(deLog2Floor32(m_parameters.size.x()) == deLog2Floor32(m_parameters.size.y()));
}
296
// Usable mip level count, computed once in the constructor.
deUint32 BasicTranscodingTestInstance::getLevelCount()
{
	return m_levelCount;
}

// Number of array layers requested by the test parameters.
deUint32 BasicTranscodingTestInstance::getLayerCount()
{
	return m_parameters.layers;
}

// Size of a single layer, computed once in the constructor.
UVec3 BasicTranscodingTestInstance::getLayerDims()
{
	return m_layerSize;
}
311
getMipLevelSizes(UVec3 baseSize)312 vector<UVec3> BasicTranscodingTestInstance::getMipLevelSizes (UVec3 baseSize)
313 {
314 vector<UVec3> levelSizes;
315 const deUint32 levelCount = getLevelCount();
316
317 baseSize.z() = 1u;
318
319 levelSizes.push_back(baseSize);
320
321 if (m_parameters.imageType == IMAGE_TYPE_1D)
322 {
323 baseSize.y() = 1u;
324
325 while (levelSizes.size() < levelCount && (baseSize.x() != 1))
326 {
327 baseSize.x() = deMax32(baseSize.x() >> 1, 1);
328 levelSizes.push_back(baseSize);
329 }
330 }
331 else
332 {
333 while (levelSizes.size() < levelCount && (baseSize.x() != 1 || baseSize.y() != 1))
334 {
335 baseSize.x() = deMax32(baseSize.x() >> 1, 1);
336 baseSize.y() = deMax32(baseSize.y() >> 1, 1);
337 levelSizes.push_back(baseSize);
338 }
339 }
340
341 DE_ASSERT(levelSizes.size() == getLevelCount());
342
343 return levelSizes;
344 }
345
// Converts each uncompressed mip level resolution (in texels) into the
// corresponding resolution in compressed blocks.
vector<UVec3> BasicTranscodingTestInstance::getCompressedMipLevelSizes (const VkFormat compressedFormat, const vector<UVec3>& uncompressedSizes)
{
	vector<UVec3> blockSizes;
	blockSizes.reserve(uncompressedSizes.size());

	for (size_t levelNdx = 0; levelNdx < uncompressedSizes.size(); ++levelNdx)
		blockSizes.push_back(getCompressedImageResolutionInBlocks(compressedFormat, uncompressedSizes[levelNdx]));

	return blockSizes;
}
356
// Fills 'toFill' with test data for one layer/level:
//  - layer 0 / level 0 additionally gets a fixed pattern of IEEE-754 edge cases
//    (infinities, signalling/quiet NaNs) written once reversed and once directly;
//  - the remainder is seeded pseudo-random data (seed derives from layer, level
//    and format so every subresource differs deterministically);
//  - finally, byte patterns that the uncompressed view format cannot round-trip
//    (SNORM minimum, half/float INF, NaN, denormals) are patched out.
void BasicTranscodingTestInstance::generateData (deUint8*		toFill,
												 const size_t	size,
												 const VkFormat	format,
												 const deUint32	layer,
												 const deUint32	level)
{
	const deUint8 pattern[] =
	{
		// 64-bit values
		0x11, 0x11, 0x11, 0x11, 0x22, 0x22, 0x22, 0x22,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF,
		0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00,
		0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00,
		0x00, 0x00, 0x00, 0x00, 0xFF, 0x00, 0x00, 0x00,
		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Positive infinity
		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Negative infinity
		0x7F, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
		0x7F, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
		0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, // Start of a signalling NaN (NANS)
		0xFF, 0xF7, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
		0x7F, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
		0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
		0xFF, 0xF8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, // Start of a quiet NaN (NANQ)
		0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
		// 32-bit values
		0x7F, 0x80, 0x00, 0x00, // Positive infinity
		0xFF, 0x80, 0x00, 0x00, // Negative infinity
		0x7F, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
		0x7F, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
		0xFF, 0x80, 0x00, 0x01, // Start of a signalling NaN (NANS)
		0xFF, 0xBF, 0xFF, 0xFF, // End of a signalling NaN (NANS)
		0x7F, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
		0x7F, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
		0xFF, 0xC0, 0x00, 0x00, // Start of a quiet NaN (NANQ)
		0xFF, 0xFF, 0xFF, 0xFF, // End of a quiet NaN (NANQ)
		0xAA, 0xAA, 0xAA, 0xAA, // Alternating bit patterns
		0x55, 0x55, 0x55, 0x55,
	};

	deUint8*	start		= toFill;
	size_t		sizeToRnd	= size;

	// Pattern part (only when the buffer can hold both copies)
	if (layer == 0 && level == 0 && size >= 2 * sizeof(pattern))
	{
		// Rotated pattern
		for (size_t i = 0; i < sizeof(pattern); i++)
			start[sizeof(pattern) - i - 1] = pattern[i];

		start += sizeof(pattern);
		sizeToRnd -= sizeof(pattern);

		// Direct pattern
		deMemcpy(start, pattern, sizeof(pattern));

		start += sizeof(pattern);
		sizeToRnd -= sizeof(pattern);
	}

	// Random part
	{
		DE_ASSERT(sizeToRnd % sizeof(deUint32) == 0);

		deUint32*	start32		= reinterpret_cast<deUint32*>(start);
		size_t		sizeToRnd32	= sizeToRnd / sizeof(deUint32);
		// Seed mixes layer, level and format so each subresource gets distinct,
		// reproducible data.
		deUint32	seed		= (layer << 24) ^ (level << 16) ^ static_cast<deUint32>(format);
		Random		rnd			(seed);

		for (size_t i = 0; i < sizeToRnd32; i++)
			start32[i] = rnd.getUint32();
	}

	{
		// Remove certain values that may not be preserved based on the uncompressed view format
		if (isSnormFormat(m_parameters.formatUncompressed))
		{
			for (size_t i = 0; i < size; i += 2)
			{
				// SNORM fix: due to write operation in SNORM format
				// replaces 0x00 0x80 to 0x01 0x80
				if (toFill[i] == 0x00 && toFill[i+1] == 0x80)
					toFill[i+1] = 0x81;
			}
		}
		else if (isFloatFormat(m_parameters.formatUncompressed))
		{
			tcu::TextureFormat textureFormat = mapVkFormat(m_parameters.formatUncompressed);

			if (textureFormat.type == tcu::TextureFormat::HALF_FLOAT)
			{
				for (size_t i = 0; i < size; i += 2)
				{
					// HALF_FLOAT fix: remove INF and NaN (exponent all-ones in the high byte)
					if ((toFill[i+1] & 0x7C) == 0x7C)
						toFill[i+1] = 0x00;
				}
			}
			else if (textureFormat.type == tcu::TextureFormat::FLOAT)
			{
				// NOTE(review): this first pass applies the HALF_FLOAT INF/NaN mask
				// to byte i+1 of each 32-bit word; presumably it guards 16-bit
				// reinterpretation of the same data — confirm against callers.
				for (size_t i = 0; i < size; i += 4)
				{
					if ((toFill[i+1] & 0x7C) == 0x7C)
						toFill[i+1] = 0x00;
				}

				for (size_t i = 0; i < size; i += 4)
				{
					// FLOAT fix: remove INF, NaN, and denorm
					// Little endian fix
					if (((toFill[i+3] & 0x7F) == 0x7F && (toFill[i+2] & 0x80) == 0x80) || ((toFill[i+3] & 0x7F) == 0x00 && (toFill[i+2] & 0x80) == 0x00))
						toFill[i+3] = 0x01;
					// Big endian fix
					if (((toFill[i+0] & 0x7F) == 0x7F && (toFill[i+1] & 0x80) == 0x80) || ((toFill[i+0] & 0x7F) == 0x00 && (toFill[i+1] & 0x80) == 0x00))
						toFill[i+0] = 0x01;
				}
			}
		}
	}
}
482
// Compute-shader variant of the transcoding tests: image 0 is the compressed
// image (accessed through uncompressed views), the last image receives the result.
class BasicComputeTestInstance : public BasicTranscodingTestInstance
{
public:
					BasicComputeTestInstance	(Context&				context,
												 const TestParameters&	parameters);
	TestStatus		iterate						(void);
protected:
	// Groups one logical image's VkImage objects, views and create infos so they
	// can be addressed together by index.
	struct ImageData
	{
		deUint32			getImagesCount		(void)							{ return static_cast<deUint32>(images.size()); }
		deUint32			getImageViewCount	(void)							{ return static_cast<deUint32>(imagesViews.size()); }
		deUint32			getImageInfoCount	(void)							{ return static_cast<deUint32>(imagesInfos.size()); }
		VkImage				getImage			(const deUint32 ndx)			{ return **images[ndx]->get(); }
		VkImageView			getImageView		(const deUint32 ndx)			{ return **imagesViews[ndx]; }
		VkImageCreateInfo	getImageInfo		(const deUint32 ndx)			{ return imagesInfos[ndx]; }
		void				addImage			(MovePtr<Image> image)			{ images.push_back(makeVkSharedPtr(image)); }
		void				addImageView		(Move<VkImageView> imageView)	{ imagesViews.push_back(makeVkSharedPtr(imageView)); }
		void				addImageInfo		(const VkImageCreateInfo imageInfo)	{ imagesInfos.push_back(imageInfo); }
		void				resetViews			()								{ imagesViews.clear(); }
	private:
		vector<ImageSp>				images;
		vector<ImageViewSp>			imagesViews;
		vector<VkImageCreateInfo>	imagesInfos;
	};
	// Uploads m_data into each image of 'imageData' via a staging buffer.
	void			copyDataToImage				(const VkCommandBuffer&			cmdBuffer,
												 ImageData&						imageData,
												 const vector<UVec3>&			mipMapSizes,
												 const bool						isCompressed);
	// Binds the images to descriptors and dispatches the transcoding compute shader.
	virtual void	executeShader				(const VkCommandBuffer&			cmdBuffer,
												 const VkDescriptorSetLayout&	descriptorSetLayout,
												 const VkDescriptorPool&		descriptorPool,
												 vector<ImageData>&				imageData);
	// Reads back 'uncompressed' and compares it against m_data at 'offset'.
	bool			copyResultAndCompare		(const VkCommandBuffer&			cmdBuffer,
												 const VkImage&					uncompressed,
												 const VkDeviceSize				offset,
												 const UVec3&					size);
	void			descriptorSetUpdate			(VkDescriptorSet				descriptorSet,
												 const VkDescriptorImageInfo*	descriptorImageInfos);
	void			createImageInfos			(ImageData&						imageData,
												 const vector<UVec3>&			mipMapSizes,
												 const bool						isCompressed);
	// Decompresses the compressed image and verifies it against the reference.
	bool			decompressImage				(const VkCommandBuffer&			cmdBuffer,
												 vector<ImageData>&				imageData,
												 const vector<UVec3>&			mipMapSizes);
	vector<deUint8>	m_data;		// Generated reference data for all levels/layers
};
529
530
// All setup happens lazily in iterate(); the constructor only forwards state.
BasicComputeTestInstance::BasicComputeTestInstance (Context& context, const TestParameters& parameters)
	:BasicTranscodingTestInstance	(context, parameters)
{
}
535
// Main test body: creates all images and views, uploads generated data, runs the
// transcoding compute shader, compares the raw result and finally decompresses
// the image for a rendered comparison.
TestStatus BasicComputeTestInstance::iterate (void)
{
	const DeviceInterface&			vk					= m_context.getDeviceInterface();
	const VkDevice					device				= m_context.getDevice();
	const deUint32					queueFamilyIndex	= m_context.getUniversalQueueFamilyIndex();
	Allocator&						allocator			= m_context.getDefaultAllocator();
	const Unique<VkCommandPool>		cmdPool				(createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer>	cmdBuffer			(allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
	const UVec3						fullSize			(m_parameters.size.x(), m_parameters.size.y(), 1);
	const vector<UVec3>				mipMapSizes			= m_parameters.useMipmaps ? getMipLevelSizes (getLayerDims()) : vector<UVec3>(1, fullSize);
	vector<ImageData>				imageData			(m_parameters.imagesCount);
	const deUint32					compressedNdx		= 0u;	// Image 0 is always the compressed image
	const deUint32					resultImageNdx		= m_parameters.imagesCount -1u;	// Last image receives the result

	// Create each image plus its views; the compressed image gets one
	// uncompressed-format view per (mip level, layer) pair.
	for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
	{
		const bool isCompressed = compressedNdx == imageNdx ? true : false;
		createImageInfos(imageData[imageNdx], mipMapSizes, isCompressed);
		for (deUint32 infoNdx = 0u; infoNdx < imageData[imageNdx].getImageInfoCount(); ++infoNdx)
		{
			imageData[imageNdx].addImage(MovePtr<Image>(new Image(vk, device, allocator, imageData[imageNdx].getImageInfo(infoNdx), MemoryRequirement::Any)));
			if (isCompressed)
			{
				// Restrict the view's usage so the implementation can validate
				// the uncompressed view of a compressed image (maintenance2).
				const VkImageViewUsageCreateInfo	imageViewUsageKHR	=
				{
					VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,	//VkStructureType		sType;
					DE_NULL,											//const void*			pNext;
					m_parameters.compressedImageUsage,					//VkImageUsageFlags		usage;
				};
				for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
				for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
				{
					imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
													mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
													makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u),
													&imageViewUsageKHR));
				}
			}
			else
			{
				imageData[imageNdx].addImageView(makeImageView(vk, device, imageData[imageNdx].getImage(infoNdx),
												mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed,
												makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 1u, 0u, 1u)));
			}
		}
	}

	// Generate reference data covering every mip level and layer of the
	// compressed image.
	{
		size_t size = 0ull;
		for(deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
		{
			size += static_cast<size_t>(getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]) * getLayerCount());
		}
		m_data.resize(size);
		generateData (&m_data[0], m_data.size(), m_parameters.formatCompressed);
	}

	// Upload the data to whichever image the shader reads from.
	switch(m_parameters.operation)
	{
		case OPERATION_IMAGE_LOAD:
		case OPERATION_TEXEL_FETCH:
		case OPERATION_TEXTURE:
			copyDataToImage(*cmdBuffer, imageData[compressedNdx], mipMapSizes, true);
			break;
		case OPERATION_IMAGE_STORE:
			copyDataToImage(*cmdBuffer, imageData[1], mipMapSizes, false);
			break;
		default:
			DE_ASSERT(false);
			break;
	}

	{
		Move<VkDescriptorSetLayout>	descriptorSetLayout;
		Move<VkDescriptorPool>		descriptorPool;

		// One binding per participating image; the compressed image is sampled
		// for the fetch/texture operations, everything else is a storage image.
		DescriptorSetLayoutBuilder	descriptorSetLayoutBuilder;
		DescriptorPoolBuilder		descriptorPoolBuilder;
		for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
		{
			switch(m_parameters.operation)
			{
				case OPERATION_IMAGE_LOAD:
				case OPERATION_IMAGE_STORE:
					descriptorSetLayoutBuilder.addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
					descriptorPoolBuilder.addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
					break;
				case OPERATION_TEXEL_FETCH:
				case OPERATION_TEXTURE:
					descriptorSetLayoutBuilder.addSingleBinding((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT);
					descriptorPoolBuilder.addType((compressedNdx == imageNdx) ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, imageData[0].getImageViewCount());
					break;
				default:
					DE_ASSERT(false);
					break;
			}
		}
		descriptorSetLayout	= descriptorSetLayoutBuilder.build(vk, device);
		descriptorPool		= descriptorPoolBuilder.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, imageData[0].getImageViewCount());
		executeShader(*cmdBuffer, *descriptorSetLayout, *descriptorPool, imageData);

		// Raw binary comparison of every (mip level, layer) of the result image
		// against the corresponding slice of the reference data.
		{
			VkDeviceSize offset = 0ull;
			for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
			for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
			{
				const deUint32	imageNdx	= layerNdx + mipNdx * getLayerCount();
				const UVec3		size		= UVec3(imageData[resultImageNdx].getImageInfo(imageNdx).extent.width,
													imageData[resultImageNdx].getImageInfo(imageNdx).extent.height,
													imageData[resultImageNdx].getImageInfo(imageNdx).extent.depth);
				if (!copyResultAndCompare(*cmdBuffer, imageData[resultImageNdx].getImage(imageNdx), offset, size))
					return TestStatus::fail("Fail");
				offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
			}
		}
	};
	if (!decompressImage(*cmdBuffer, imageData, mipMapSizes))
			return TestStatus::fail("Fail");

	// A tolerated ASTC LDR/HDR error-colour mismatch downgrades the pass
	// to a quality warning.
	if (m_bASTCErrorColourMismatch)
	{
		DE_ASSERT(m_parameters.formatIsASTC);
		return TestStatusASTCQualityWarning();
	}

	return TestStatus::pass("Pass");
}
663
// Uploads m_data into every image of 'imageData' through a host-visible staging
// buffer: one buffer-to-image copy per mip level, all layers at once, with the
// running buffer offset advanced by the compressed byte size of each level.
void BasicComputeTestInstance::copyDataToImage (const VkCommandBuffer&	cmdBuffer,
												ImageData&				imageData,
												const vector<UVec3>&	mipMapSizes,
												const bool				isCompressed)
{
	const DeviceInterface&	vk			= m_context.getDeviceInterface();
	const VkDevice			device		= m_context.getDevice();
	const VkQueue			queue		= m_context.getUniversalQueue();
	Allocator&				allocator	= m_context.getDefaultAllocator();

	Buffer					imageBuffer	(vk, device, allocator,
											makeBufferCreateInfo(m_data.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT),
											MemoryRequirement::HostVisible);
	VkDeviceSize			offset		= 0ull;
	// Fill the staging buffer and make the host write visible to the device.
	{
		const Allocation& alloc = imageBuffer.getAllocation();
		deMemcpy(alloc.getHostPtr(), &m_data[0], m_data.size());
		flushAlloc(vk, device, alloc);
	}

	beginCommandBuffer(vk, cmdBuffer);
	const VkImageSubresourceRange	subresourceRange		=
	{
		VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
		0u,											//deUint32				baseMipLevel
		imageData.getImageInfo(0u).mipLevels,		//deUint32				levelCount
		0u,											//deUint32				baseArrayLayer
		imageData.getImageInfo(0u).arrayLayers		//deUint32				layerCount
	};

	for (deUint32 imageNdx = 0u; imageNdx < imageData.getImagesCount(); ++imageNdx)
	{
		// Transition the whole image for transfer writes (contents discardable).
		const VkImageMemoryBarrier		preCopyImageBarrier		= makeImageMemoryBarrier(
																	0u, VK_ACCESS_TRANSFER_WRITE_BIT,
																	VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
																	imageData.getImage(imageNdx), subresourceRange);

		const VkBufferMemoryBarrier		FlushHostCopyBarrier	= makeBufferMemoryBarrier(
																	VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
																	imageBuffer.get(), 0ull, m_data.size());

		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &FlushHostCopyBarrier, 1u, &preCopyImageBarrier);

		for (deUint32 mipNdx = 0u; mipNdx < imageData.getImageInfo(imageNdx).mipLevels; ++mipNdx)
		{
			// For compressed images the copy extent is given in blocks
			// (mipMapSizes already is in blocks); uncompressed images use the
			// create-info extent directly.
			const VkExtent3D				imageExtent		= isCompressed ?
																makeExtent3D(mipMapSizes[mipNdx]) :
																imageData.getImageInfo(imageNdx).extent;
			const VkBufferImageCopy			copyRegion		=
			{
				offset,																		//VkDeviceSize				bufferOffset;
				0u,																			//deUint32					bufferRowLength;
				0u,																			//deUint32					bufferImageHeight;
				makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 0u, imageData.getImageInfo(imageNdx).arrayLayers),	//VkImageSubresourceLayers	imageSubresource;
				makeOffset3D(0, 0, 0),														//VkOffset3D				imageOffset;
				imageExtent,																//VkExtent3D				imageExtent;
			};

			vk.cmdCopyBufferToImage(cmdBuffer, imageBuffer.get(), imageData.getImage(imageNdx), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &copyRegion);
			// Advance by the compressed byte size of this level for all layers;
			// uncompressed extents are scaled back up to texel dimensions first.
			offset += getCompressedImageSizeInBytes(m_parameters.formatCompressed,
						UVec3(isCompressed ? imageExtent.width : imageExtent.width * m_blockWidth, isCompressed? imageExtent.height :imageExtent.height * m_blockHeight,imageExtent.depth)) *
						imageData.getImageInfo(imageNdx).arrayLayers;
		}
	}
	endCommandBuffer(vk, cmdBuffer);
	submitCommandsAndWait(vk, device, queue, cmdBuffer);
}
732
// Builds the compute pipeline, binds one descriptor set per view of the
// compressed image and dispatches the transcoding shader once per view.
void BasicComputeTestInstance::executeShader (const VkCommandBuffer&		cmdBuffer,
											  const VkDescriptorSetLayout&	descriptorSetLayout,
											  const VkDescriptorPool&		descriptorPool,
											  vector<ImageData>&			imageData)
{
	const DeviceInterface&			vk					= m_context.getDeviceInterface();
	const VkDevice					device				= m_context.getDevice();
	const VkQueue					queue				= m_context.getUniversalQueue();
	const Unique<VkShaderModule>	shaderModule		(createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
	vector<SharedVkDescriptorSet>	descriptorSets		(imageData[0].getImageViewCount());
	const Unique<VkPipelineLayout>	pipelineLayout		(makePipelineLayout(vk, device, descriptorSetLayout));
	const Unique<VkPipeline>		pipeline			(makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
	Move<VkSampler>					sampler;
	// Nearest-filtering sampler for the texelFetch/texture operations.
	{
		const VkSamplerCreateInfo createInfo =
		{
			VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,		//VkStructureType		sType;
			DE_NULL,									//const void*			pNext;
			0u,											//VkSamplerCreateFlags	flags;
			VK_FILTER_NEAREST,							//VkFilter				magFilter;
			VK_FILTER_NEAREST,							//VkFilter				minFilter;
			VK_SAMPLER_MIPMAP_MODE_NEAREST,				//VkSamplerMipmapMode	mipmapMode;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeU;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeV;
			VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,		//VkSamplerAddressMode	addressModeW;
			0.0f,										//float					mipLodBias;
			VK_FALSE,									//VkBool32				anisotropyEnable;
			1.0f,										//float					maxAnisotropy;
			VK_FALSE,									//VkBool32				compareEnable;
			VK_COMPARE_OP_EQUAL,						//VkCompareOp			compareOp;
			0.0f,										//float					minLod;
			0.0f,										//float					maxLod;
			VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,	//VkBorderColor			borderColor;
			VK_FALSE,									//VkBool32				unnormalizedCoordinates;
		};
		sampler = createSampler(vk, device, &createInfo);
	}

	// Descriptor image infos: for each view index, one entry per participating image.
	vector<VkDescriptorImageInfo>	descriptorImageInfos	(descriptorSets.size() * m_parameters.imagesCount);
	for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
	{
		const deUint32	descriptorNdx	= viewNdx * m_parameters.imagesCount;
		for (deUint32 imageNdx = 0; imageNdx < m_parameters.imagesCount; ++imageNdx)
		{
			descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
														imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
		}
	}

	for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
		descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));

	beginCommandBuffer(vk, cmdBuffer);
	{
		const VkImageSubresourceRange	compressedRange				=
		{
			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
			0u,											//deUint32				baseMipLevel
			imageData[0].getImageInfo(0u).mipLevels,	//deUint32				levelCount
			0u,											//deUint32				baseArrayLayer
			imageData[0].getImageInfo(0u).arrayLayers	//deUint32				layerCount
		};
		const VkImageSubresourceRange	uncompressedRange			=
		{
			VK_IMAGE_ASPECT_COLOR_BIT,					//VkImageAspectFlags	aspectMask
			0u,											//deUint32				baseMipLevel
			1u,											//deUint32				levelCount
			0u,											//deUint32				baseArrayLayer
			1u											//deUint32				layerCount
		};

		vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);

		// Transition: all uncompressed images to GENERAL for shader writes, plus
		// the compressed image (last element) to GENERAL for shader reads.
		// NOTE(review): sized descriptorSets.size()+1 but filled with
		// imageData[1].getImagesCount() uncompressed-image barriers — presumably
		// these counts match (one uncompressed image per view); confirm.
		vector<VkImageMemoryBarrier>	preShaderImageBarriers;
		preShaderImageBarriers.resize(descriptorSets.size() + 1u);
		for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
		{
			preShaderImageBarriers[imageNdx]= makeImageMemoryBarrier(
												VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
												VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
												imageData[1].getImage(imageNdx), uncompressedRange);
		}

		preShaderImageBarriers[descriptorSets.size()] = makeImageMemoryBarrier(
															VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
															VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
															imageData[0].getImage(0), compressedRange);

		vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
			(VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
			static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);

		// One dispatch per view, sized to the corresponding uncompressed extent
		// (one invocation per texel).
		for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
		{
			descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
			vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
			vk.cmdDispatch(cmdBuffer,	imageData[1].getImageInfo(ndx).extent.width,
										imageData[1].getImageInfo(ndx).extent.height,
										imageData[1].getImageInfo(ndx).extent.depth);
		}
	}
	endCommandBuffer(vk, cmdBuffer);
	submitCommandsAndWait(vk, device, queue, cmdBuffer);
}
837
copyResultAndCompare(const VkCommandBuffer & cmdBuffer,const VkImage & uncompressed,const VkDeviceSize offset,const UVec3 & size)838 bool BasicComputeTestInstance::copyResultAndCompare (const VkCommandBuffer& cmdBuffer,
839 const VkImage& uncompressed,
840 const VkDeviceSize offset,
841 const UVec3& size)
842 {
843 const DeviceInterface& vk = m_context.getDeviceInterface();
844 const VkQueue queue = m_context.getUniversalQueue();
845 const VkDevice device = m_context.getDevice();
846 Allocator& allocator = m_context.getDefaultAllocator();
847
848 VkDeviceSize imageResultSize = getImageSizeBytes (tcu::IVec3(size.x(), size.y(), size.z()), m_parameters.formatUncompressed);
849 Buffer imageBufferResult (vk, device, allocator,
850 makeBufferCreateInfo(imageResultSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT),
851 MemoryRequirement::HostVisible);
852
853 beginCommandBuffer(vk, cmdBuffer);
854 {
855 const VkImageSubresourceRange subresourceRange =
856 {
857 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
858 0u, //deUint32 baseMipLevel
859 1u, //deUint32 levelCount
860 0u, //deUint32 baseArrayLayer
861 1u //deUint32 layerCount
862 };
863
864 const VkBufferImageCopy copyRegion =
865 {
866 0ull, // VkDeviceSize bufferOffset;
867 0u, // deUint32 bufferRowLength;
868 0u, // deUint32 bufferImageHeight;
869 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
870 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
871 makeExtent3D(size), // VkExtent3D imageExtent;
872 };
873
874 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
875 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
876 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
877 uncompressed, subresourceRange);
878
879 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
880 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
881 imageBufferResult.get(), 0ull, imageResultSize);
882
883 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1u, &prepareForTransferBarrier);
884 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, imageBufferResult.get(), 1u, ©Region);
885 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0u, (const VkImageMemoryBarrier*)DE_NULL);
886 }
887 endCommandBuffer(vk, cmdBuffer);
888 submitCommandsAndWait(vk, device, queue, cmdBuffer);
889
890 const Allocation& allocResult = imageBufferResult.getAllocation();
891 invalidateAlloc(vk, device, allocResult);
892 if (deMemCmp((const void *)allocResult.getHostPtr(), (const void *)&m_data[static_cast<size_t>(offset)], static_cast<size_t>(imageResultSize)) == 0ull)
893 return true;
894 return false;
895 }
896
descriptorSetUpdate(VkDescriptorSet descriptorSet,const VkDescriptorImageInfo * descriptorImageInfos)897 void BasicComputeTestInstance::descriptorSetUpdate (VkDescriptorSet descriptorSet, const VkDescriptorImageInfo* descriptorImageInfos)
898 {
899 const DeviceInterface& vk = m_context.getDeviceInterface();
900 const VkDevice device = m_context.getDevice();
901 DescriptorSetUpdateBuilder descriptorSetUpdateBuilder;
902
903 switch(m_parameters.operation)
904 {
905 case OPERATION_IMAGE_LOAD:
906 case OPERATION_IMAGE_STORE:
907 {
908 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
909 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
910
911 break;
912 }
913
914 case OPERATION_TEXEL_FETCH:
915 case OPERATION_TEXTURE:
916 {
917 for (deUint32 bindingNdx = 0u; bindingNdx < m_parameters.imagesCount; ++bindingNdx)
918 {
919 descriptorSetUpdateBuilder.writeSingle(descriptorSet, DescriptorSetUpdateBuilder::Location::binding(bindingNdx),
920 bindingNdx == 0u ? VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER : VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[bindingNdx]);
921 }
922
923 break;
924 }
925
926 default:
927 DE_ASSERT(false);
928 }
929 descriptorSetUpdateBuilder.update(vk, device);
930 }
931
createImageInfos(ImageData & imageData,const vector<UVec3> & mipMapSizes,const bool isCompressed)932 void BasicComputeTestInstance::createImageInfos (ImageData& imageData, const vector<UVec3>& mipMapSizes, const bool isCompressed)
933 {
934 const VkImageType imageType = mapImageType(m_parameters.imageType);
935
936 if (isCompressed)
937 {
938 VkFormatProperties properties;
939 m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), m_parameters.formatCompressed, &properties);
940 if (!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
941 TCU_THROW(NotSupportedError, "Format storage feature not supported");
942
943 const VkExtent3D extentCompressed = makeExtent3D(getLayerSize(m_parameters.imageType, m_parameters.size));
944 const VkImageCreateInfo compressedInfo =
945 {
946 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
947 DE_NULL, // const void* pNext;
948 VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT |
949 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR |
950 VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR, // VkImageCreateFlags flags;
951 imageType, // VkImageType imageType;
952 m_parameters.formatCompressed, // VkFormat format;
953 extentCompressed, // VkExtent3D extent;
954 static_cast<deUint32>(mipMapSizes.size()), // deUint32 mipLevels;
955 getLayerCount(), // deUint32 arrayLayers;
956 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
957 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
958 VK_IMAGE_USAGE_SAMPLED_BIT |
959 VK_IMAGE_USAGE_STORAGE_BIT |
960 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
961 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
962 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
963 0u, // deUint32 queueFamilyIndexCount;
964 DE_NULL, // const deUint32* pQueueFamilyIndices;
965 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
966 };
967 imageData.addImageInfo(compressedInfo);
968 }
969 else
970 {
971 UVec3 size = m_parameters.size;
972 size.z() = 1;
973 const VkExtent3D originalResolutionInBlocks = makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, size));
974
975 for (size_t mipNdx = 0ull; mipNdx < mipMapSizes.size(); ++mipNdx)
976 for (size_t layerNdx = 0ull; layerNdx < getLayerCount(); ++layerNdx)
977 {
978 const VkExtent3D extentUncompressed = m_parameters.useMipmaps ?
979 makeExtent3D(getCompressedImageResolutionInBlocks(m_parameters.formatCompressed, mipMapSizes[mipNdx])) :
980 originalResolutionInBlocks;
981 const VkImageCreateInfo uncompressedInfo =
982 {
983 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
984 DE_NULL, // const void* pNext;
985 0u, // VkImageCreateFlags flags;
986 imageType, // VkImageType imageType;
987 m_parameters.formatUncompressed, // VkFormat format;
988 extentUncompressed, // VkExtent3D extent;
989 1u, // deUint32 mipLevels;
990 1u, // deUint32 arrayLayers;
991 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
992 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
993 m_parameters.uncompressedImageUsage |
994 VK_IMAGE_USAGE_SAMPLED_BIT, // VkImageUsageFlags usage;
995 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
996 0u, // deUint32 queueFamilyIndexCount;
997 DE_NULL, // const deUint32* pQueueFamilyIndices;
998 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
999 };
1000 imageData.addImageInfo(uncompressedInfo);
1001 }
1002 }
1003 }
1004
decompressImage(const VkCommandBuffer & cmdBuffer,vector<ImageData> & imageData,const vector<UVec3> & mipMapSizes)1005 bool BasicComputeTestInstance::decompressImage (const VkCommandBuffer& cmdBuffer,
1006 vector<ImageData>& imageData,
1007 const vector<UVec3>& mipMapSizes)
1008 {
1009 const DeviceInterface& vk = m_context.getDeviceInterface();
1010 const VkDevice device = m_context.getDevice();
1011 const VkQueue queue = m_context.getUniversalQueue();
1012 Allocator& allocator = m_context.getDefaultAllocator();
1013 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("decompress"), 0));
1014 const VkImage& compressed = imageData[0].getImage(0);
1015 const VkImageType imageType = mapImageType(m_parameters.imageType);
1016
1017 for (deUint32 ndx = 0u; ndx < imageData.size(); ndx++)
1018 imageData[ndx].resetViews();
1019
1020 for (deUint32 mipNdx = 0u; mipNdx < mipMapSizes.size(); ++mipNdx)
1021 for (deUint32 layerNdx = 0u; layerNdx < getLayerCount(); ++layerNdx)
1022 {
1023 const bool layoutShaderReadOnly = (layerNdx % 2u) == 1;
1024 const deUint32 imageNdx = layerNdx + mipNdx * getLayerCount();
1025 const VkExtent3D extentCompressed = makeExtent3D(mipMapSizes[mipNdx]);
1026 const VkImage& uncompressed = imageData[m_parameters.imagesCount -1].getImage(imageNdx);
1027 const VkExtent3D extentUncompressed = imageData[m_parameters.imagesCount -1].getImageInfo(imageNdx).extent;
1028 const VkDeviceSize bufferSizeComp = getCompressedImageSizeInBytes(m_parameters.formatCompressed, mipMapSizes[mipNdx]);
1029
1030 VkFormatProperties properties;
1031 m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), m_parameters.formatForVerify, &properties);
1032 if (!(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
1033 TCU_THROW(NotSupportedError, "Format storage feature not supported");
1034
1035 const VkImageCreateInfo decompressedImageInfo =
1036 {
1037 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1038 DE_NULL, // const void* pNext;
1039 0u, // VkImageCreateFlags flags;
1040 imageType, // VkImageType imageType;
1041 m_parameters.formatForVerify, // VkFormat format;
1042 extentCompressed, // VkExtent3D extent;
1043 1u, // deUint32 mipLevels;
1044 1u, // deUint32 arrayLayers;
1045 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1046 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1047 VK_IMAGE_USAGE_SAMPLED_BIT |
1048 VK_IMAGE_USAGE_STORAGE_BIT |
1049 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
1050 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
1051 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1052 0u, // deUint32 queueFamilyIndexCount;
1053 DE_NULL, // const deUint32* pQueueFamilyIndices;
1054 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1055 };
1056
1057 const VkImageCreateInfo compressedImageInfo =
1058 {
1059 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1060 DE_NULL, // const void* pNext;
1061 0u, // VkImageCreateFlags flags;
1062 imageType, // VkImageType imageType;
1063 m_parameters.formatCompressed, // VkFormat format;
1064 extentCompressed, // VkExtent3D extent;
1065 1u, // deUint32 mipLevels;
1066 1u, // deUint32 arrayLayers;
1067 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1068 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1069 VK_IMAGE_USAGE_SAMPLED_BIT |
1070 VK_IMAGE_USAGE_TRANSFER_DST_BIT, // VkImageUsageFlags usage;
1071 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1072 0u, // deUint32 queueFamilyIndexCount;
1073 DE_NULL, // const deUint32* pQueueFamilyIndices;
1074 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1075 };
1076 const VkImageUsageFlags compressedViewUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
1077 const VkImageViewUsageCreateInfo compressedViewUsageCI =
1078 {
1079 VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR, //VkStructureType sType;
1080 DE_NULL, //const void* pNext;
1081 compressedViewUsageFlags, //VkImageUsageFlags usage;
1082 };
1083 const VkImageViewType imageViewType (mapImageViewType(m_parameters.imageType));
1084 Image resultImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
1085 Image referenceImage (vk, device, allocator, decompressedImageInfo, MemoryRequirement::Any);
1086 Image uncompressedImage (vk, device, allocator, compressedImageInfo, MemoryRequirement::Any);
1087 Move<VkImageView> resultView = makeImageView(vk, device, resultImage.get(), imageViewType, decompressedImageInfo.format,
1088 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
1089 Move<VkImageView> referenceView = makeImageView(vk, device, referenceImage.get(), imageViewType, decompressedImageInfo.format,
1090 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, decompressedImageInfo.extent.depth, 0u, decompressedImageInfo.arrayLayers));
1091 Move<VkImageView> uncompressedView = makeImageView(vk, device, uncompressedImage.get(), imageViewType, m_parameters.formatCompressed,
1092 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, compressedImageInfo.extent.depth, 0u, compressedImageInfo.arrayLayers));
1093 Move<VkImageView> compressedView = makeImageView(vk, device, compressed, imageViewType, m_parameters.formatCompressed,
1094 makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, mipNdx, 1u, layerNdx, 1u), &compressedViewUsageCI);
1095 Move<VkDescriptorSetLayout> descriptorSetLayout = DescriptorSetLayoutBuilder()
1096 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
1097 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_COMPUTE_BIT)
1098 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
1099 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_COMPUTE_BIT)
1100 .build(vk, device);
1101 Move<VkDescriptorPool> descriptorPool = DescriptorPoolBuilder()
1102 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
1103 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, decompressedImageInfo.arrayLayers)
1104 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
1105 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, decompressedImageInfo.arrayLayers)
1106 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, decompressedImageInfo.arrayLayers);
1107
1108 Move<VkDescriptorSet> descriptorSet = makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout);
1109 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
1110 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1111 const VkDeviceSize bufferSize = getImageSizeBytes(IVec3((int)extentCompressed.width, (int)extentCompressed.height, (int)extentCompressed.depth), m_parameters.formatForVerify);
1112 Buffer resultBuffer (vk, device, allocator,
1113 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1114 Buffer referenceBuffer (vk, device, allocator,
1115 makeBufferCreateInfo(bufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1116 Buffer transferBuffer (vk, device, allocator,
1117 makeBufferCreateInfo(bufferSizeComp, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT), MemoryRequirement::HostVisible);
1118 Move<VkSampler> sampler;
1119 {
1120 const VkSamplerCreateInfo createInfo =
1121 {
1122 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
1123 DE_NULL, //const void* pNext;
1124 0u, //VkSamplerCreateFlags flags;
1125 VK_FILTER_NEAREST, //VkFilter magFilter;
1126 VK_FILTER_NEAREST, //VkFilter minFilter;
1127 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
1128 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
1129 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
1130 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
1131 0.0f, //float mipLodBias;
1132 VK_FALSE, //VkBool32 anisotropyEnable;
1133 1.0f, //float maxAnisotropy;
1134 VK_FALSE, //VkBool32 compareEnable;
1135 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1136 0.0f, //float minLod;
1137 1.0f, //float maxLod;
1138 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1139 VK_FALSE, //VkBool32 unnormalizedCoordinates;
1140 };
1141 sampler = createSampler(vk, device, &createInfo);
1142 }
1143
1144 VkDescriptorImageInfo descriptorImageInfos[] =
1145 {
1146 makeDescriptorImageInfo(*sampler, *uncompressedView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1147 makeDescriptorImageInfo(*sampler, *compressedView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL),
1148 makeDescriptorImageInfo(DE_NULL, *resultView, VK_IMAGE_LAYOUT_GENERAL),
1149 makeDescriptorImageInfo(DE_NULL, *referenceView, VK_IMAGE_LAYOUT_GENERAL)
1150 };
1151 DescriptorSetUpdateBuilder()
1152 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[0])
1153 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorImageInfos[1])
1154 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[2])
1155 .writeSingle(descriptorSet.get(), DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorImageInfos[3])
1156 .update(vk, device);
1157
1158
1159 beginCommandBuffer(vk, cmdBuffer);
1160 {
1161 const VkImageSubresourceRange subresourceRange =
1162 {
1163 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1164 0u, //deUint32 baseMipLevel
1165 1u, //deUint32 levelCount
1166 0u, //deUint32 baseArrayLayer
1167 1u //deUint32 layerCount
1168 };
1169
1170 const VkImageSubresourceRange subresourceRangeComp =
1171 {
1172 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1173 mipNdx, //deUint32 baseMipLevel
1174 1u, //deUint32 levelCount
1175 layerNdx, //deUint32 baseArrayLayer
1176 1u //deUint32 layerCount
1177 };
1178
1179 const VkBufferImageCopy copyRegion =
1180 {
1181 0ull, // VkDeviceSize bufferOffset;
1182 0u, // deUint32 bufferRowLength;
1183 0u, // deUint32 bufferImageHeight;
1184 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1185 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1186 decompressedImageInfo.extent, // VkExtent3D imageExtent;
1187 };
1188
1189 const VkBufferImageCopy compressedCopyRegion =
1190 {
1191 0ull, // VkDeviceSize bufferOffset;
1192 0u, // deUint32 bufferRowLength;
1193 0u, // deUint32 bufferImageHeight;
1194 makeImageSubresourceLayers(VK_IMAGE_ASPECT_COLOR_BIT, 0u, 0u, 1u), // VkImageSubresourceLayers imageSubresource;
1195 makeOffset3D(0, 0, 0), // VkOffset3D imageOffset;
1196 extentUncompressed, // VkExtent3D imageExtent;
1197 };
1198
1199 {
1200
1201 const VkBufferMemoryBarrier preCopyBufferBarriers = makeBufferMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1202 transferBuffer.get(), 0ull, bufferSizeComp);
1203
1204 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1205 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &preCopyBufferBarriers, 0u, (const VkImageMemoryBarrier*)DE_NULL);
1206 }
1207
1208 vk.cmdCopyImageToBuffer(cmdBuffer, uncompressed, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, transferBuffer.get(), 1u, &compressedCopyRegion);
1209
1210 {
1211 const VkBufferMemoryBarrier postCopyBufferBarriers = makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1212 transferBuffer.get(), 0ull, bufferSizeComp);
1213
1214 const VkImageMemoryBarrier preCopyImageBarriers = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT,
1215 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, uncompressedImage.get(), subresourceRange);
1216
1217 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1218 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 1u, &postCopyBufferBarriers, 1u, &preCopyImageBarriers);
1219 }
1220
1221 vk.cmdCopyBufferToImage(cmdBuffer, transferBuffer.get(), uncompressedImage.get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, ©Region);
1222
1223 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1224 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
1225
1226 {
1227 const VkImageMemoryBarrier preShaderImageBarriers[] =
1228 {
1229
1230 makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1231 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1232 uncompressedImage.get(), subresourceRange),
1233
1234 makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT,
1235 VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL,
1236 compressed, subresourceRangeComp),
1237
1238 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1239 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1240 resultImage.get(), subresourceRange),
1241
1242 makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT,
1243 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1244 referenceImage.get(), subresourceRange)
1245 };
1246
1247 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1248 (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1249 DE_LENGTH_OF_ARRAY(preShaderImageBarriers), preShaderImageBarriers);
1250 }
1251
1252 vk.cmdDispatch(cmdBuffer, extentCompressed.width, extentCompressed.height, extentCompressed.depth);
1253
1254 {
1255 const VkImageMemoryBarrier postShaderImageBarriers[] =
1256 {
1257 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1258 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1259 resultImage.get(), subresourceRange),
1260
1261 makeImageMemoryBarrier(VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
1262 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
1263 referenceImage.get(), subresourceRange)
1264 };
1265
1266 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT,
1267 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1268 DE_LENGTH_OF_ARRAY(postShaderImageBarriers), postShaderImageBarriers);
1269 }
1270
1271 vk.cmdCopyImageToBuffer(cmdBuffer, resultImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, resultBuffer.get(), 1u, ©Region);
1272 vk.cmdCopyImageToBuffer(cmdBuffer, referenceImage.get(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, referenceBuffer.get(), 1u, ©Region);
1273
1274 {
1275 const VkBufferMemoryBarrier postCopyBufferBarrier[] =
1276 {
1277 makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1278 resultBuffer.get(), 0ull, bufferSize),
1279
1280 makeBufferMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
1281 referenceBuffer.get(), 0ull, bufferSize),
1282 };
1283
1284 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT,
1285 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, DE_LENGTH_OF_ARRAY(postCopyBufferBarrier), postCopyBufferBarrier,
1286 0u, (const VkImageMemoryBarrier*)DE_NULL);
1287 }
1288 }
1289 endCommandBuffer(vk, cmdBuffer);
1290 submitCommandsAndWait(vk, device, queue, cmdBuffer);
1291
1292 const Allocation& resultAlloc = resultBuffer.getAllocation();
1293 const Allocation& referenceAlloc = referenceBuffer.getAllocation();
1294 invalidateAlloc(vk, device, resultAlloc);
1295 invalidateAlloc(vk, device, referenceAlloc);
1296
1297 BinaryCompareMode compareMode =
1298 (m_parameters.formatIsASTC)
1299 ?(COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
1300 :(COMPARE_MODE_NORMAL);
1301
1302 BinaryCompareResult res = BinaryCompare(referenceAlloc.getHostPtr(),
1303 resultAlloc.getHostPtr(),
1304 (size_t)bufferSize,
1305 m_parameters.formatForVerify,
1306 compareMode);
1307
1308 if (res == COMPARE_RESULT_FAILED)
1309 {
1310 ConstPixelBufferAccess resultPixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, resultAlloc.getHostPtr());
1311 ConstPixelBufferAccess referencePixels (mapVkFormat(decompressedImageInfo.format), decompressedImageInfo.extent.width, decompressedImageInfo.extent.height, decompressedImageInfo.extent.depth, referenceAlloc.getHostPtr());
1312
1313 if(!fuzzyCompare(m_context.getTestContext().getLog(), "Image Comparison", "Image Comparison", resultPixels, referencePixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING))
1314 return false;
1315 }
1316 else if (res == COMPARE_RESULT_ASTC_QUALITY_WARNING)
1317 {
1318 m_bASTCErrorColourMismatch = true;
1319 }
1320 }
1321
1322 return true;
1323 }
1324
// Compute-transcoding variant that verifies image-store operations; reuses the
// BasicComputeTestInstance flow and overrides only the shader-execution step.
class ImageStoreComputeTestInstance : public BasicComputeTestInstance
{
public:
ImageStoreComputeTestInstance (Context& context,
const TestParameters& parameters);
protected:
// Records and submits the "comp" compute shader over all descriptor sets.
virtual void executeShader (const VkCommandBuffer& cmdBuffer,
const VkDescriptorSetLayout& descriptorSetLayout,
const VkDescriptorPool& descriptorPool,
vector<ImageData>& imageData);
private:
};
1337
ImageStoreComputeTestInstance(Context & context,const TestParameters & parameters)1338 ImageStoreComputeTestInstance::ImageStoreComputeTestInstance (Context& context, const TestParameters& parameters)
1339 :BasicComputeTestInstance (context, parameters)
1340 {
1341 }
1342
executeShader(const VkCommandBuffer & cmdBuffer,const VkDescriptorSetLayout & descriptorSetLayout,const VkDescriptorPool & descriptorPool,vector<ImageData> & imageData)1343 void ImageStoreComputeTestInstance::executeShader (const VkCommandBuffer& cmdBuffer,
1344 const VkDescriptorSetLayout& descriptorSetLayout,
1345 const VkDescriptorPool& descriptorPool,
1346 vector<ImageData>& imageData)
1347 {
1348 const DeviceInterface& vk = m_context.getDeviceInterface();
1349 const VkDevice device = m_context.getDevice();
1350 const VkQueue queue = m_context.getUniversalQueue();
1351 const Unique<VkShaderModule> shaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("comp"), 0));
1352 vector<SharedVkDescriptorSet> descriptorSets (imageData[0].getImageViewCount());
1353 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, descriptorSetLayout));
1354 const Unique<VkPipeline> pipeline (makeComputePipeline(vk, device, *pipelineLayout, *shaderModule));
1355 Move<VkSampler> sampler;
1356 {
1357 const VkSamplerCreateInfo createInfo =
1358 {
1359 VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, //VkStructureType sType;
1360 DE_NULL, //const void* pNext;
1361 0u, //VkSamplerCreateFlags flags;
1362 VK_FILTER_NEAREST, //VkFilter magFilter;
1363 VK_FILTER_NEAREST, //VkFilter minFilter;
1364 VK_SAMPLER_MIPMAP_MODE_NEAREST, //VkSamplerMipmapMode mipmapMode;
1365 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeU;
1366 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeV;
1367 VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, //VkSamplerAddressMode addressModeW;
1368 0.0f, //float mipLodBias;
1369 VK_FALSE, //VkBool32 anisotropyEnable;
1370 1.0f, //float maxAnisotropy;
1371 VK_FALSE, //VkBool32 compareEnable;
1372 VK_COMPARE_OP_EQUAL, //VkCompareOp compareOp;
1373 0.0f, //float minLod;
1374 0.0f, //float maxLod;
1375 VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, //VkBorderColor borderColor;
1376 VK_TRUE, //VkBool32 unnormalizedCoordinates;
1377 };
1378 sampler = createSampler(vk, device, &createInfo);
1379 }
1380
1381 vector<VkDescriptorImageInfo> descriptorImageInfos (descriptorSets.size() * m_parameters.imagesCount);
1382 for (deUint32 viewNdx = 0u; viewNdx < descriptorSets.size(); ++viewNdx)
1383 {
1384 const deUint32 descriptorNdx = viewNdx * m_parameters.imagesCount;
1385 for (deUint32 imageNdx = 0u; imageNdx < m_parameters.imagesCount; ++imageNdx)
1386 {
1387 descriptorImageInfos[descriptorNdx+imageNdx] = makeDescriptorImageInfo(*sampler,
1388 imageData[imageNdx].getImageView(viewNdx), VK_IMAGE_LAYOUT_GENERAL);
1389 }
1390 }
1391
1392 for (deUint32 ndx = 0u; ndx < descriptorSets.size(); ++ndx)
1393 descriptorSets[ndx] = makeVkSharedPtr(makeDescriptorSet(vk, device, descriptorPool, descriptorSetLayout));
1394
1395 beginCommandBuffer(vk, cmdBuffer);
1396 {
1397 const VkImageSubresourceRange compressedRange =
1398 {
1399 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1400 0u, //deUint32 baseMipLevel
1401 imageData[0].getImageInfo(0).mipLevels, //deUint32 levelCount
1402 0u, //deUint32 baseArrayLayer
1403 imageData[0].getImageInfo(0).arrayLayers //deUint32 layerCount
1404 };
1405
1406 const VkImageSubresourceRange uncompressedRange =
1407 {
1408 VK_IMAGE_ASPECT_COLOR_BIT, //VkImageAspectFlags aspectMask
1409 0u, //deUint32 baseMipLevel
1410 1u, //deUint32 levelCount
1411 0u, //deUint32 baseArrayLayer
1412 1u //deUint32 layerCount
1413 };
1414
1415 vk.cmdBindPipeline(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipeline);
1416
1417 vector<VkImageMemoryBarrier> preShaderImageBarriers (descriptorSets.size() * 2u + 1u);
1418 for (deUint32 imageNdx = 0u; imageNdx < imageData[1].getImagesCount(); ++imageNdx)
1419 {
1420 preShaderImageBarriers[imageNdx] = makeImageMemoryBarrier(
1421 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1422 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL,
1423 imageData[1].getImage(imageNdx), uncompressedRange);
1424
1425 preShaderImageBarriers[imageNdx + imageData[1].getImagesCount()] = makeImageMemoryBarrier(
1426 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_WRITE_BIT,
1427 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1428 imageData[2].getImage(imageNdx), uncompressedRange);
1429 }
1430
1431 preShaderImageBarriers[preShaderImageBarriers.size()-1] = makeImageMemoryBarrier(
1432 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT,
1433 VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL,
1434 imageData[0].getImage(0u), compressedRange);
1435
1436 vk.cmdPipelineBarrier(cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
1437 (VkDependencyFlags)0, 0u, (const VkMemoryBarrier*)DE_NULL, 0u, (const VkBufferMemoryBarrier*)DE_NULL,
1438 static_cast<deUint32>(preShaderImageBarriers.size()), &preShaderImageBarriers[0]);
1439
1440 for (deUint32 ndx = 0u; ndx <descriptorSets.size(); ++ndx)
1441 {
1442 descriptorSetUpdate (**descriptorSets[ndx], &descriptorImageInfos[ndx* m_parameters.imagesCount]);
1443 vk.cmdBindDescriptorSets(cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, *pipelineLayout, 0u, 1u, &(**descriptorSets[ndx]), 0u, DE_NULL);
1444 vk.cmdDispatch(cmdBuffer, imageData[1].getImageInfo(ndx).extent.width,
1445 imageData[1].getImageInfo(ndx).extent.height,
1446 imageData[1].getImageInfo(ndx).extent.depth);
1447 }
1448 }
1449 endCommandBuffer(vk, cmdBuffer);
1450 submitCommandsAndWait(vk, device, queue, cmdBuffer);
1451 }
1452
1453 class GraphicsAttachmentsTestInstance : public BasicTranscodingTestInstance
1454 {
1455 public:
1456 GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters);
1457 virtual TestStatus iterate (void);
1458
1459 protected:
1460 virtual bool isWriteToCompressedOperation ();
1461 VkImageCreateInfo makeCreateImageInfo (const VkFormat format,
1462 const ImageType type,
1463 const UVec3& size,
1464 const VkImageUsageFlags usageFlags,
1465 const VkImageCreateFlags* createFlags,
1466 const deUint32 levels,
1467 const deUint32 layers);
1468 VkDeviceSize getCompressedImageData (const VkFormat format,
1469 const UVec3& size,
1470 std::vector<deUint8>& data,
1471 const deUint32 layer,
1472 const deUint32 level);
1473 VkDeviceSize getUncompressedImageData (const VkFormat format,
1474 const UVec3& size,
1475 std::vector<deUint8>& data,
1476 const deUint32 layer,
1477 const deUint32 level);
1478 virtual void prepareData ();
1479 virtual void prepareVertexBuffer ();
1480 virtual void transcodeRead ();
1481 virtual void transcodeWrite ();
1482 bool verifyDecompression (const std::vector<deUint8>& refCompressedData,
1483 const de::MovePtr<Image>& resCompressedImage,
1484 const deUint32 layer,
1485 const deUint32 level,
1486 const UVec3& mipmapDims);
1487
1488 typedef std::vector<deUint8> RawDataVector;
1489 typedef SharedPtr<RawDataVector> RawDataPtr;
1490 typedef std::vector<RawDataPtr> LevelData;
1491 typedef std::vector<LevelData> FullImageData;
1492
1493 FullImageData m_srcData;
1494 FullImageData m_dstData;
1495
1496 typedef SharedPtr<Image> ImagePtr;
1497 typedef std::vector<ImagePtr> LevelImages;
1498 typedef std::vector<LevelImages> ImagesArray;
1499
1500 ImagesArray m_uncompressedImages;
1501 MovePtr<Image> m_compressedImage;
1502
1503 VkImageViewUsageCreateInfo m_imageViewUsageKHR;
1504 VkImageViewUsageCreateInfo* m_srcImageViewUsageKHR;
1505 VkImageViewUsageCreateInfo* m_dstImageViewUsageKHR;
1506 std::vector<tcu::UVec3> m_compressedImageResVec;
1507 std::vector<tcu::UVec3> m_uncompressedImageResVec;
1508 VkFormat m_srcFormat;
1509 VkFormat m_dstFormat;
1510 VkImageUsageFlags m_srcImageUsageFlags;
1511 VkImageUsageFlags m_dstImageUsageFlags;
1512 std::vector<tcu::UVec3> m_srcImageResolutions;
1513 std::vector<tcu::UVec3> m_dstImageResolutions;
1514
1515 MovePtr<Buffer> m_vertexBuffer;
1516 deUint32 m_vertexCount;
1517 VkDeviceSize m_vertexBufferOffset;
1518 };
1519
// Initializes every member to an empty/zero state; the actual test resources
// are created later by prepareData()/prepareVertexBuffer() from iterate().
GraphicsAttachmentsTestInstance::GraphicsAttachmentsTestInstance (Context& context, const TestParameters& parameters)
	: BasicTranscodingTestInstance(context, parameters)
	, m_srcData()
	, m_dstData()
	, m_uncompressedImages()
	, m_compressedImage()
	, m_imageViewUsageKHR()
	, m_srcImageViewUsageKHR()
	, m_dstImageViewUsageKHR()
	, m_compressedImageResVec()
	, m_uncompressedImageResVec()
	, m_srcFormat()
	, m_dstFormat()
	, m_srcImageUsageFlags()
	, m_dstImageUsageFlags()
	, m_srcImageResolutions()
	, m_dstImageResolutions()
	, m_vertexBuffer()
	, m_vertexCount(0u)
	, m_vertexBufferOffset(0ull)
{
}
1542
iterate(void)1543 TestStatus GraphicsAttachmentsTestInstance::iterate (void)
1544 {
1545 prepareData();
1546 prepareVertexBuffer();
1547
1548 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1549 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1550 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1551
1552 if (isWriteToCompressedOperation())
1553 transcodeWrite();
1554 else
1555 transcodeRead();
1556
1557 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1558 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1559 if (isWriteToCompressedOperation())
1560 {
1561 if (!verifyDecompression(*m_srcData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1562 return TestStatus::fail("Images difference detected");
1563 }
1564 else
1565 {
1566 if (!verifyDecompression(*m_dstData[levelNdx][layerNdx], m_compressedImage, levelNdx, layerNdx, m_compressedImageResVec[levelNdx]))
1567 return TestStatus::fail("Images difference detected");
1568 }
1569
1570 if (m_bASTCErrorColourMismatch)
1571 {
1572 DE_ASSERT(m_parameters.formatIsASTC);
1573 return TestStatusASTCQualityWarning();
1574 }
1575
1576 return TestStatus::pass("Pass");
1577 }
1578
prepareData()1579 void GraphicsAttachmentsTestInstance::prepareData ()
1580 {
1581 VkImageViewUsageCreateInfo* imageViewUsageKHRNull = (VkImageViewUsageCreateInfo*)DE_NULL;
1582
1583 m_imageViewUsageKHR = makeImageViewUsageCreateInfo(m_parameters.compressedImageViewUsage);
1584
1585 m_srcImageViewUsageKHR = isWriteToCompressedOperation() ? imageViewUsageKHRNull : &m_imageViewUsageKHR;
1586 m_dstImageViewUsageKHR = isWriteToCompressedOperation() ? &m_imageViewUsageKHR : imageViewUsageKHRNull;
1587
1588 m_srcFormat = isWriteToCompressedOperation() ? m_parameters.formatUncompressed : m_parameters.formatCompressed;
1589 m_dstFormat = isWriteToCompressedOperation() ? m_parameters.formatCompressed : m_parameters.formatUncompressed;
1590
1591 m_srcImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.uncompressedImageUsage : m_parameters.compressedImageUsage;
1592 m_dstImageUsageFlags = isWriteToCompressedOperation() ? m_parameters.compressedImageUsage : m_parameters.uncompressedImageUsage;
1593
1594 m_compressedImageResVec = getMipLevelSizes(getLayerDims());
1595 m_uncompressedImageResVec = getCompressedMipLevelSizes(m_parameters.formatCompressed, m_compressedImageResVec);
1596
1597 m_srcImageResolutions = isWriteToCompressedOperation() ? m_uncompressedImageResVec : m_compressedImageResVec;
1598 m_dstImageResolutions = isWriteToCompressedOperation() ? m_compressedImageResVec : m_uncompressedImageResVec;
1599
1600 m_srcData.resize(getLevelCount());
1601 m_dstData.resize(getLevelCount());
1602 m_uncompressedImages.resize(getLevelCount());
1603
1604 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
1605 {
1606 m_srcData[levelNdx].resize(getLayerCount());
1607 m_dstData[levelNdx].resize(getLayerCount());
1608 m_uncompressedImages[levelNdx].resize(getLayerCount());
1609
1610 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
1611 {
1612 m_srcData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1613 m_dstData[levelNdx][layerNdx] = SharedPtr<RawDataVector>(new RawDataVector);
1614
1615 if (isWriteToCompressedOperation())
1616 {
1617 getUncompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1618
1619 m_dstData[levelNdx][layerNdx]->resize((size_t)getCompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1620 }
1621 else
1622 {
1623 getCompressedImageData(m_srcFormat, m_srcImageResolutions[levelNdx], *m_srcData[levelNdx][layerNdx], layerNdx, levelNdx);
1624
1625 m_dstData[levelNdx][layerNdx]->resize((size_t)getUncompressedImageSizeInBytes(m_dstFormat, m_dstImageResolutions[levelNdx]));
1626 }
1627
1628 DE_ASSERT(m_srcData[levelNdx][layerNdx]->size() == m_dstData[levelNdx][layerNdx]->size());
1629 }
1630 }
1631 }
1632
prepareVertexBuffer()1633 void GraphicsAttachmentsTestInstance::prepareVertexBuffer ()
1634 {
1635 const DeviceInterface& vk = m_context.getDeviceInterface();
1636 const VkDevice device = m_context.getDevice();
1637 Allocator& allocator = m_context.getDefaultAllocator();
1638
1639 const std::vector<tcu::Vec4> vertexArray = createFullscreenQuad();
1640 const size_t vertexBufferSizeInBytes = vertexArray.size() * sizeof(vertexArray[0]);
1641
1642 m_vertexCount = static_cast<deUint32>(vertexArray.size());
1643 m_vertexBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, makeBufferCreateInfo(vertexBufferSizeInBytes, VK_BUFFER_USAGE_VERTEX_BUFFER_BIT), MemoryRequirement::HostVisible));
1644
1645 // Upload vertex data
1646 const Allocation& vertexBufferAlloc = m_vertexBuffer->getAllocation();
1647 deMemcpy(vertexBufferAlloc.getHostPtr(), &vertexArray[0], vertexBufferSizeInBytes);
1648 flushAlloc(vk, device, vertexBufferAlloc);
1649 }
1650
// Reads from the compressed image through uncompressed views: for every
// (level, layer), the compressed data is uploaded into srcImage, bound as an
// input attachment to the fragment shader, rendered into a single-level
// uncompressed color attachment, and read back into m_dstData. The compressed
// srcImage is kept in m_compressedImage for later verification.
void GraphicsAttachmentsTestInstance::transcodeRead ()
{
	const DeviceInterface& vk = m_context.getDeviceInterface();
	const VkDevice device = m_context.getDevice();
	const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
	const VkQueue queue = m_context.getUniversalQueue();
	Allocator& allocator = m_context.getDefaultAllocator();

	const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;

	// The source image holds all mip levels and array layers of the compressed data.
	const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));

	// One input-attachment binding read by the fragment shader.
	const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
		.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
		.build(vk, device));
	const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
		.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// Viewport/scissor are dynamic; the pipeline is created with a dummy extent.
	const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));

	const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
		const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
		const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
		const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
		const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
		// Compressed copies address whole blocks; round the resolution up.
		const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);

		// Per-(level,layer) destination: a single-level, single-layer uncompressed image.
		const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
		const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

		const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
		MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

		const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport viewport = makeViewport(renderSize);
		const VkRect2D scissor = makeRect2D(renderSize);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
			const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);

			// View the compressed subresource with the uncompressed format.
			Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
			const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
			const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, dstImage->get(), dstSubresourceRange);

			const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
			const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
			const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushAlloc(vk, device, alloc);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Bind the source view as the input attachment and draw the quad.
			const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			endRenderPass(vk, *cmdBuffer);

			// Read the rendered destination image back into the host buffer.
			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);

			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateAlloc(vk, device, dstImageBufferAlloc);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	// Keep the compressed source image for decompression-based verification.
	m_compressedImage = srcImage;
}
1784
// Writes into the compressed image through uncompressed views: for every
// (level, layer), uncompressed source data is uploaded into a temporary
// single-level image, read as an input attachment, and rendered into an
// uncompressed-format view of the (multi-level, multi-layer) compressed
// dstImage; the written subresource is then read back into m_dstData. The
// compressed dstImage is kept in m_compressedImage for later verification.
void GraphicsAttachmentsTestInstance::transcodeWrite ()
{
	const DeviceInterface& vk = m_context.getDeviceInterface();
	const VkDevice device = m_context.getDevice();
	const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
	const VkQueue queue = m_context.getUniversalQueue();
	Allocator& allocator = m_context.getDefaultAllocator();

	const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;

	// The destination holds all mip levels and array layers of the compressed data.
	const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
	MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));

	const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
	const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));

	const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device, m_parameters.formatUncompressed, m_parameters.formatUncompressed));

	// One input-attachment binding read by the fragment shader.
	const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
		.addSingleBinding(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, VK_SHADER_STAGE_FRAGMENT_BIT)
		.build(vk, device));
	const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
		.addType(VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT)
		.build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
	const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));

	// Viewport/scissor are dynamic; the pipeline is created with a dummy extent.
	const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
	const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
	const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 1u, true));

	const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
	const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));

	for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
	{
		const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
		const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
		const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
		// Compressed copies address whole blocks; round the resolution up.
		const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
		const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
		const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();

		// Per-(level,layer) source: a single-level, single-layer uncompressed image.
		const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);

		const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
		const VkViewport viewport = makeViewport(renderSize);
		const VkRect2D scissor = makeRect2D(renderSize);

		for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
		{
			const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
			const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));

			const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
			MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));

			const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
			const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);

			// View the compressed destination subresource with the uncompressed format.
			Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));

			de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
			Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));

			const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
			const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
			const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
			const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_INPUT_ATTACHMENT_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
			const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
			const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);

			const VkImageView attachmentBindInfos[] = { *srcImageView, *dstImageView };
			const VkExtent2D framebufferSize (renderSize);
			const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, DE_LENGTH_OF_ARRAY(attachmentBindInfos), attachmentBindInfos, framebufferSize, SINGLE_LAYER));

			// Upload source image data
			const Allocation& alloc = srcImageBuffer->getAllocation();
			deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
			flushAlloc(vk, device, alloc);

			beginCommandBuffer(vk, *cmdBuffer);
			vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);

			// Copy buffer to image
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
			vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);

			// Define destination image layout
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);

			beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);

			// Bind the source view as the input attachment and draw the quad.
			const VkDescriptorImageInfo descriptorSrcImageInfo(makeDescriptorImageInfo(DE_NULL, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
			DescriptorSetUpdateBuilder()
				.writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, &descriptorSrcImageInfo)
				.update(vk, device);

			vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
			vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);

			vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
			vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);

			vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);

			endRenderPass(vk, *cmdBuffer);

			// Read the written compressed subresource back into the host buffer.
			const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
				VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
				VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
				dstImage->get(), dstSubresourceRange);

			const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
				VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
				dstImageBuffer->get(), 0ull, dstImageSizeInBytes);

			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
			vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
			vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &copyBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);

			endCommandBuffer(vk, *cmdBuffer);

			submitCommandsAndWait(vk, device, queue, *cmdBuffer);

			const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
			invalidateAlloc(vk, device, dstImageBufferAlloc);
			deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
		}
	}

	// Keep the compressed destination image for decompression-based verification.
	m_compressedImage = dstImage;
}
1918
isWriteToCompressedOperation()1919 bool GraphicsAttachmentsTestInstance::isWriteToCompressedOperation ()
1920 {
1921 return (m_parameters.operation == OPERATION_ATTACHMENT_WRITE);
1922 }
1923
makeCreateImageInfo(const VkFormat format,const ImageType type,const UVec3 & size,const VkImageUsageFlags usageFlags,const VkImageCreateFlags * createFlags,const deUint32 levels,const deUint32 layers)1924 VkImageCreateInfo GraphicsAttachmentsTestInstance::makeCreateImageInfo (const VkFormat format,
1925 const ImageType type,
1926 const UVec3& size,
1927 const VkImageUsageFlags usageFlags,
1928 const VkImageCreateFlags* createFlags,
1929 const deUint32 levels,
1930 const deUint32 layers)
1931 {
1932 const VkImageType imageType = mapImageType(type);
1933 const VkImageCreateFlags imageCreateFlagsBase = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
1934 const VkImageCreateFlags imageCreateFlagsAddOn = isCompressedFormat(format) ? VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR : 0;
1935 const VkImageCreateFlags imageCreateFlags = (createFlags != DE_NULL) ? *createFlags : (imageCreateFlagsBase | imageCreateFlagsAddOn);
1936
1937 VkFormatProperties properties;
1938 m_context.getInstanceInterface().getPhysicalDeviceFormatProperties(m_context.getPhysicalDevice(), format, &properties);
1939 if ((usageFlags & VK_IMAGE_USAGE_STORAGE_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT))
1940 TCU_THROW(NotSupportedError, "Format storage feature not supported");
1941 if ((usageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) && !(properties.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
1942 TCU_THROW(NotSupportedError, "Format color attachment feature not supported");
1943
1944 const VkImageCreateInfo createImageInfo =
1945 {
1946 VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, // VkStructureType sType;
1947 DE_NULL, // const void* pNext;
1948 imageCreateFlags, // VkImageCreateFlags flags;
1949 imageType, // VkImageType imageType;
1950 format, // VkFormat format;
1951 makeExtent3D(getLayerSize(type, size)), // VkExtent3D extent;
1952 levels, // deUint32 mipLevels;
1953 layers, // deUint32 arrayLayers;
1954 VK_SAMPLE_COUNT_1_BIT, // VkSampleCountFlagBits samples;
1955 VK_IMAGE_TILING_OPTIMAL, // VkImageTiling tiling;
1956 usageFlags, // VkImageUsageFlags usage;
1957 VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode;
1958 0u, // deUint32 queueFamilyIndexCount;
1959 DE_NULL, // const deUint32* pQueueFamilyIndices;
1960 VK_IMAGE_LAYOUT_UNDEFINED, // VkImageLayout initialLayout;
1961 };
1962
1963 return createImageInfo;
1964 }
1965
getCompressedImageData(const VkFormat format,const UVec3 & size,std::vector<deUint8> & data,const deUint32 layer,const deUint32 level)1966 VkDeviceSize GraphicsAttachmentsTestInstance::getCompressedImageData (const VkFormat format,
1967 const UVec3& size,
1968 std::vector<deUint8>& data,
1969 const deUint32 layer,
1970 const deUint32 level)
1971 {
1972 VkDeviceSize sizeBytes = getCompressedImageSizeInBytes(format, size);
1973
1974 data.resize((size_t)sizeBytes);
1975 generateData(&data[0], data.size(), format, layer, level);
1976
1977 return sizeBytes;
1978 }
1979
getUncompressedImageData(const VkFormat format,const UVec3 & size,std::vector<deUint8> & data,const deUint32 layer,const deUint32 level)1980 VkDeviceSize GraphicsAttachmentsTestInstance::getUncompressedImageData (const VkFormat format,
1981 const UVec3& size,
1982 std::vector<deUint8>& data,
1983 const deUint32 layer,
1984 const deUint32 level)
1985 {
1986 tcu::IVec3 sizeAsIVec3 = tcu::IVec3(static_cast<int>(size[0]), static_cast<int>(size[1]), static_cast<int>(size[2]));
1987 VkDeviceSize sizeBytes = getImageSizeBytes(sizeAsIVec3, format);
1988
1989 data.resize((size_t)sizeBytes);
1990 generateData(&data[0], data.size(), format, layer, level);
1991
1992 return sizeBytes;
1993 }
1994
verifyDecompression(const std::vector<deUint8> & refCompressedData,const de::MovePtr<Image> & resCompressedImage,const deUint32 level,const deUint32 layer,const UVec3 & mipmapDims)1995 bool GraphicsAttachmentsTestInstance::verifyDecompression (const std::vector<deUint8>& refCompressedData,
1996 const de::MovePtr<Image>& resCompressedImage,
1997 const deUint32 level,
1998 const deUint32 layer,
1999 const UVec3& mipmapDims)
2000 {
2001 const DeviceInterface& vk = m_context.getDeviceInterface();
2002 const VkDevice device = m_context.getDevice();
2003 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2004 const VkQueue queue = m_context.getUniversalQueue();
2005 Allocator& allocator = m_context.getDefaultAllocator();
2006
2007 const bool layoutShaderReadOnly = (layer % 2u) == 1;
2008 const UVec3 mipmapDimsBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, mipmapDims);
2009
2010 const VkImageSubresourceRange subresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2011 const VkImageSubresourceRange resSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, level, SINGLE_LEVEL, layer, SINGLE_LAYER);
2012
2013 const VkDeviceSize dstBufferSize = getUncompressedImageSizeInBytes(m_parameters.formatForVerify, mipmapDims);
2014 const VkImageUsageFlags refSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2015
2016 const VkBufferCreateInfo refSrcImageBufferInfo (makeBufferCreateInfo(refCompressedData.size(), VK_BUFFER_USAGE_TRANSFER_SRC_BIT));
2017 const MovePtr<Buffer> refSrcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refSrcImageBufferInfo, MemoryRequirement::HostVisible));
2018
2019 const VkImageCreateFlags refSrcImageCreateFlags = 0;
2020 const VkImageCreateInfo refSrcImageCreateInfo = makeCreateImageInfo(m_parameters.formatCompressed, m_parameters.imageType, mipmapDimsBlocked, refSrcImageUsageFlags, &refSrcImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2021 const MovePtr<Image> refSrcImage (new Image(vk, device, allocator, refSrcImageCreateInfo, MemoryRequirement::Any));
2022 Move<VkImageView> refSrcImageView (makeImageView(vk, device, refSrcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, subresourceRange));
2023
2024 const VkImageUsageFlags resSrcImageUsageFlags = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
2025 const VkImageViewUsageCreateInfo resSrcImageViewUsageKHR = makeImageViewUsageCreateInfo(resSrcImageUsageFlags);
2026 Move<VkImageView> resSrcImageView (makeImageView(vk, device, resCompressedImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatCompressed, resSubresourceRange, &resSrcImageViewUsageKHR));
2027
2028 const VkImageCreateFlags refDstImageCreateFlags = 0;
2029 const VkImageUsageFlags refDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2030 const VkImageCreateInfo refDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, refDstImageUsageFlags, &refDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2031 const MovePtr<Image> refDstImage (new Image(vk, device, allocator, refDstImageCreateInfo, MemoryRequirement::Any));
2032 const Move<VkImageView> refDstImageView (makeImageView(vk, device, refDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
2033 const VkImageMemoryBarrier refDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refDstImage->get(), subresourceRange);
2034 const VkBufferCreateInfo refDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
2035 const MovePtr<Buffer> refDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, refDstBufferInfo, MemoryRequirement::HostVisible));
2036
2037 const VkImageCreateFlags resDstImageCreateFlags = 0;
2038 const VkImageUsageFlags resDstImageUsageFlags = VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
2039 const VkImageCreateInfo resDstImageCreateInfo = makeCreateImageInfo(m_parameters.formatForVerify, m_parameters.imageType, mipmapDims, resDstImageUsageFlags, &resDstImageCreateFlags, SINGLE_LEVEL, SINGLE_LAYER);
2040 const MovePtr<Image> resDstImage (new Image(vk, device, allocator, resDstImageCreateInfo, MemoryRequirement::Any));
2041 const Move<VkImageView> resDstImageView (makeImageView(vk, device, resDstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatForVerify, subresourceRange));
2042 const VkImageMemoryBarrier resDstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, resDstImage->get(), subresourceRange);
2043 const VkBufferCreateInfo resDstBufferInfo (makeBufferCreateInfo(dstBufferSize, VK_BUFFER_USAGE_TRANSFER_DST_BIT));
2044 const MovePtr<Buffer> resDstBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, resDstBufferInfo, MemoryRequirement::HostVisible));
2045
2046 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2047 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag_verify"), 0));
2048
2049 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
2050
2051 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2052 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2053 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2054 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2055 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2056 .build(vk, device));
2057 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2058 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2059 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2060 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2061 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2062 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2063 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2064 const VkSamplerCreateInfo refSrcSamplerInfo (makeSamplerCreateInfo());
2065 const Move<VkSampler> refSrcSampler = vk::createSampler(vk, device, &refSrcSamplerInfo);
2066 const VkSamplerCreateInfo resSrcSamplerInfo (makeSamplerCreateInfo());
2067 const Move<VkSampler> resSrcSampler = vk::createSampler(vk, device, &resSrcSamplerInfo);
2068 const VkDescriptorImageInfo descriptorRefSrcImage (makeDescriptorImageInfo(*refSrcSampler, *refSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
2069 const VkDescriptorImageInfo descriptorResSrcImage (makeDescriptorImageInfo(*resSrcSampler, *resSrcImageView, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL));
2070 const VkDescriptorImageInfo descriptorRefDstImage (makeDescriptorImageInfo(DE_NULL, *refDstImageView, VK_IMAGE_LAYOUT_GENERAL));
2071 const VkDescriptorImageInfo descriptorResDstImage (makeDescriptorImageInfo(DE_NULL, *resDstImageView, VK_IMAGE_LAYOUT_GENERAL));
2072
2073 const VkExtent2D renderSize (makeExtent2D(mipmapDims.x(), mipmapDims.y()));
2074 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2075 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSize, 0u));
2076 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT, queueFamilyIndex));
2077 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2078
2079 const VkBufferImageCopy copyBufferToImageRegion = makeBufferImageCopy(mipmapDimsBlocked.x(), mipmapDimsBlocked.y(), 0u, 0u, mipmapDimsBlocked.x(), mipmapDimsBlocked.y());
2080 const VkBufferImageCopy copyRegion = makeBufferImageCopy(mipmapDims.x(), mipmapDims.y(), 0u, 0u);
2081 const VkBufferMemoryBarrier refSrcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, refSrcImageBuffer->get(), 0ull, refCompressedData.size());
2082 const VkImageMemoryBarrier refSrcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
2083 const VkImageMemoryBarrier refSrcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, refSrcImage->get(), subresourceRange);
2084 const VkImageMemoryBarrier resCompressedImageBarrier = makeImageMemoryBarrier(0, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_GENERAL, layoutShaderReadOnly ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL : VK_IMAGE_LAYOUT_GENERAL, resCompressedImage->get(), resSubresourceRange);
2085
2086 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, renderSize, getLayerCount()));
2087
2088 // Upload source image data
2089 {
2090 const Allocation& refSrcImageBufferAlloc = refSrcImageBuffer->getAllocation();
2091 deMemcpy(refSrcImageBufferAlloc.getHostPtr(), &refCompressedData[0], refCompressedData.size());
2092 flushAlloc(vk, device, refSrcImageBufferAlloc);
2093 }
2094
2095 beginCommandBuffer(vk, *cmdBuffer);
2096 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2097
2098 // Copy buffer to image
2099 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &refSrcCopyBufferBarrierPre, 1u, &refSrcCopyImageBarrierPre);
2100 vk.cmdCopyBufferToImage(*cmdBuffer, refSrcImageBuffer->get(), refSrcImage->get(), VK_IMAGE_LAYOUT_GENERAL, 1u, ©BufferToImageRegion);
2101 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, DE_NULL, 1u, &refSrcCopyImageBarrierPost);
2102
2103 // Make reference and result images readable
2104 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &refDstInitImageBarrier);
2105 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resDstInitImageBarrier);
2106 {
2107 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &resCompressedImageBarrier);
2108 }
2109
2110 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2111 {
2112 DescriptorSetUpdateBuilder()
2113 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorRefSrcImage)
2114 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorResSrcImage)
2115 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(2u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorRefDstImage)
2116 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(3u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorResDstImage)
2117 .update(vk, device);
2118
2119 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2120 vk.cmdBindVertexBuffers(*cmdBuffer, 0, 1, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2121 vk.cmdDraw(*cmdBuffer, m_vertexCount, 1, 0, 0);
2122 }
2123 endRenderPass(vk, *cmdBuffer);
2124
2125 // Decompress reference image
2126 {
2127 const VkImageMemoryBarrier refDstImageBarrier = makeImageMemoryBarrier(
2128 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2129 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2130 refDstImage->get(), subresourceRange);
2131
2132 const VkBufferMemoryBarrier refDstBufferBarrier = makeBufferMemoryBarrier(
2133 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2134 refDstBuffer->get(), 0ull, dstBufferSize);
2135
2136 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &refDstImageBarrier);
2137 vk.cmdCopyImageToBuffer(*cmdBuffer, refDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, refDstBuffer->get(), 1u, ©Region);
2138 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &refDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2139 }
2140
2141 // Decompress result image
2142 {
2143 const VkImageMemoryBarrier resDstImageBarrier = makeImageMemoryBarrier(
2144 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2145 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2146 resDstImage->get(), subresourceRange);
2147
2148 const VkBufferMemoryBarrier resDstBufferBarrier = makeBufferMemoryBarrier(
2149 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2150 resDstBuffer->get(), 0ull, dstBufferSize);
2151
2152 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &resDstImageBarrier);
2153 vk.cmdCopyImageToBuffer(*cmdBuffer, resDstImage->get(), VK_IMAGE_LAYOUT_GENERAL, resDstBuffer->get(), 1u, ©Region);
2154 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, &resDstBufferBarrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2155 }
2156
2157 endCommandBuffer(vk, *cmdBuffer);
2158
2159 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2160
2161 // Compare decompressed pixel data in reference and result images
2162 {
2163 const Allocation& refDstBufferAlloc = refDstBuffer->getAllocation();
2164 invalidateAlloc(vk, device, refDstBufferAlloc);
2165
2166 const Allocation& resDstBufferAlloc = resDstBuffer->getAllocation();
2167 invalidateAlloc(vk, device, resDstBufferAlloc);
2168
2169 BinaryCompareMode compareMode =
2170 (m_parameters.formatIsASTC)
2171 ?(COMPARE_MODE_ALLOW_ASTC_ERROR_COLOUR_WARNING)
2172 :(COMPARE_MODE_NORMAL);
2173
2174 BinaryCompareResult res = BinaryCompare(refDstBufferAlloc.getHostPtr(),
2175 resDstBufferAlloc.getHostPtr(),
2176 dstBufferSize,
2177 m_parameters.formatForVerify,
2178 compareMode);
2179
2180 if (res == COMPARE_RESULT_FAILED)
2181 {
2182 // Do fuzzy to log error mask
2183 invalidateAlloc(vk, device, resDstBufferAlloc);
2184 invalidateAlloc(vk, device, refDstBufferAlloc);
2185
2186 tcu::ConstPixelBufferAccess resPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, resDstBufferAlloc.getHostPtr());
2187 tcu::ConstPixelBufferAccess refPixels (mapVkFormat(m_parameters.formatForVerify), renderSize.width, renderSize.height, 1u, refDstBufferAlloc.getHostPtr());
2188
2189 string comment = string("Image Comparison (level=") + de::toString(level) + string(", layer=") + de::toString(layer) + string(")");
2190
2191 if (isWriteToCompressedOperation())
2192 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), refPixels, resPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2193 else
2194 tcu::fuzzyCompare(m_context.getTestContext().getLog(), "ImageComparison", comment.c_str(), resPixels, refPixels, 0.001f, tcu::COMPARE_LOG_EVERYTHING);
2195
2196 return false;
2197 }
2198 else if (res == COMPARE_RESULT_ASTC_QUALITY_WARNING)
2199 {
2200 m_bASTCErrorColourMismatch = true;
2201 }
2202 }
2203
2204 return true;
2205 }
2206
2207
// Test instance for transcoding through texture (sampled-image) operations in
// the graphics pipeline. Reuses the attachment-based machinery of
// GraphicsAttachmentsTestInstance, overriding only the transcode paths.
class GraphicsTextureTestInstance : public GraphicsAttachmentsTestInstance
{
public:
	GraphicsTextureTestInstance (Context& context, const TestParameters& parameters);

protected:
	// True when the parameterized operation writes into the compressed image
	// (OPERATION_TEXTURE_WRITE) rather than reading from it.
	virtual bool isWriteToCompressedOperation ();
	// Sample each level/layer of the compressed source and copy the shader
	// output back to m_dstData for verification.
	virtual void transcodeRead ();
	// Render into the compressed destination image from uncompressed sources.
	virtual void transcodeWrite ();
};
2218
// Forwards straight to the base class; the texture variant differs only in
// the overridden transcode paths, not in construction.
GraphicsTextureTestInstance::GraphicsTextureTestInstance (Context& context, const TestParameters& parameters)
	: GraphicsAttachmentsTestInstance(context, parameters)
{
}
2223
isWriteToCompressedOperation()2224 bool GraphicsTextureTestInstance::isWriteToCompressedOperation ()
2225 {
2226 return (m_parameters.operation == OPERATION_TEXTURE_WRITE);
2227 }
2228
transcodeRead()2229 void GraphicsTextureTestInstance::transcodeRead ()
2230 {
2231 const DeviceInterface& vk = m_context.getDeviceInterface();
2232 const VkDevice device = m_context.getDevice();
2233 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2234 const VkQueue queue = m_context.getUniversalQueue();
2235 Allocator& allocator = m_context.getDefaultAllocator();
2236
2237 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2238
2239 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, m_srcImageResolutions[0], m_srcImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2240 MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2241
2242 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2243 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2244
2245 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
2246
2247 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2248 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2249 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2250 .build(vk, device));
2251 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2252 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2253 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2254 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2255 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2256
2257 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
2258 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2259 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
2260
2261 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
2262 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2263
2264 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2265 {
2266 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2267 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2268 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2269 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2270 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2271 const UVec3 srcImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, srcImageResolution);
2272
2273 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, dstImageResolution, m_dstImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2274
2275 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2276 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2277
2278 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2279 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2280
2281 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2282 const VkViewport viewport = makeViewport(renderSize);
2283 const VkRect2D scissor = makeRect2D(renderSize);
2284
2285 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2286 {
2287 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2288 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2289
2290 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2291
2292 de::MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2293 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2294
2295 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2296 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2297 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2298 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2299
2300 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), levelNdx, layerNdx, srcImageResBlocked.x(), srcImageResBlocked.y());
2301 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2302 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2303 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2304 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y());
2305 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2306
2307 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2308 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));
2309
2310 // Upload source image data
2311 const Allocation& alloc = srcImageBuffer->getAllocation();
2312 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2313 flushAlloc(vk, device, alloc);
2314
2315 beginCommandBuffer(vk, *cmdBuffer);
2316 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2317
2318 // Copy buffer to image
2319 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2320 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2321 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2322
2323 // Define destination image layout
2324 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2325
2326 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2327
2328 DescriptorSetUpdateBuilder()
2329 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2330 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2331 .update(vk, device);
2332
2333 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2334 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2335
2336 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2337 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2338
2339 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2340
2341 endRenderPass(vk, *cmdBuffer);
2342
2343 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2344 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2345 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2346 dstImage->get(), dstSubresourceRange);
2347
2348 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2349 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2350 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2351
2352 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2353 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2354 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2355
2356 endCommandBuffer(vk, *cmdBuffer);
2357
2358 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2359
2360 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2361 invalidateAlloc(vk, device, dstImageBufferAlloc);
2362 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2363 }
2364 }
2365
2366 m_compressedImage = srcImage;
2367 }
2368
transcodeWrite()2369 void GraphicsTextureTestInstance::transcodeWrite ()
2370 {
2371 const DeviceInterface& vk = m_context.getDeviceInterface();
2372 const VkDevice device = m_context.getDevice();
2373 const deUint32 queueFamilyIndex = m_context.getUniversalQueueFamilyIndex();
2374 const VkQueue queue = m_context.getUniversalQueue();
2375 Allocator& allocator = m_context.getDefaultAllocator();
2376
2377 const VkImageCreateFlags* imgCreateFlagsOverride = DE_NULL;
2378
2379 const VkImageCreateInfo dstImageCreateInfo = makeCreateImageInfo(m_dstFormat, m_parameters.imageType, m_dstImageResolutions[0], m_dstImageUsageFlags, imgCreateFlagsOverride, getLevelCount(), getLayerCount());
2380 MovePtr<Image> dstImage (new Image(vk, device, allocator, dstImageCreateInfo, MemoryRequirement::Any));
2381
2382 const Unique<VkShaderModule> vertShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("vert"), 0));
2383 const Unique<VkShaderModule> fragShaderModule (createShaderModule(vk, device, m_context.getBinaryCollection().get("frag"), 0));
2384
2385 const Unique<VkRenderPass> renderPass (makeRenderPass(vk, device));
2386
2387 const Move<VkDescriptorSetLayout> descriptorSetLayout (DescriptorSetLayoutBuilder()
2388 .addSingleBinding(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, VK_SHADER_STAGE_FRAGMENT_BIT)
2389 .addSingleBinding(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, VK_SHADER_STAGE_FRAGMENT_BIT)
2390 .build(vk, device));
2391 const Move<VkDescriptorPool> descriptorPool (DescriptorPoolBuilder()
2392 .addType(VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
2393 .addType(VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
2394 .build(vk, device, VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT, 1u));
2395 const Move<VkDescriptorSet> descriptorSet (makeDescriptorSet(vk, device, *descriptorPool, *descriptorSetLayout));
2396
2397 const VkExtent2D renderSizeDummy (makeExtent2D(1u, 1u));
2398 const Unique<VkPipelineLayout> pipelineLayout (makePipelineLayout(vk, device, *descriptorSetLayout));
2399 const Unique<VkPipeline> pipeline (makeGraphicsPipeline(vk, device, *pipelineLayout, *renderPass, *vertShaderModule, *fragShaderModule, renderSizeDummy, 0u, true));
2400
2401 const Unique<VkCommandPool> cmdPool (createCommandPool(vk, device, VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT, queueFamilyIndex));
2402 const Unique<VkCommandBuffer> cmdBuffer (allocateCommandBuffer(vk, device, *cmdPool, VK_COMMAND_BUFFER_LEVEL_PRIMARY));
2403
2404 for (deUint32 levelNdx = 0; levelNdx < getLevelCount(); ++levelNdx)
2405 {
2406 const UVec3& uncompressedImageRes = m_uncompressedImageResVec[levelNdx];
2407 const UVec3& srcImageResolution = m_srcImageResolutions[levelNdx];
2408 const UVec3& dstImageResolution = m_dstImageResolutions[levelNdx];
2409 const size_t srcImageSizeInBytes = m_srcData[levelNdx][0]->size();
2410 const size_t dstImageSizeInBytes = m_dstData[levelNdx][0]->size();
2411 const UVec3 dstImageResBlocked = getCompressedImageResolutionBlockCeil(m_parameters.formatCompressed, dstImageResolution);
2412
2413 const VkImageCreateInfo srcImageCreateInfo = makeCreateImageInfo(m_srcFormat, m_parameters.imageType, srcImageResolution, m_srcImageUsageFlags, imgCreateFlagsOverride, SINGLE_LEVEL, SINGLE_LAYER);
2414
2415 const VkExtent2D renderSize (makeExtent2D(uncompressedImageRes.x(), uncompressedImageRes.y()));
2416 const VkViewport viewport = makeViewport(renderSize);
2417 const VkRect2D scissor = makeRect2D(renderSize);
2418
2419 for (deUint32 layerNdx = 0; layerNdx < getLayerCount(); ++layerNdx)
2420 {
2421 const VkBufferCreateInfo srcImageBufferInfo = makeBufferCreateInfo(srcImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_SRC_BIT);
2422 const MovePtr<Buffer> srcImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, srcImageBufferInfo, MemoryRequirement::HostVisible));
2423
2424 const VkBufferCreateInfo dstImageBufferInfo = makeBufferCreateInfo(dstImageSizeInBytes, VK_BUFFER_USAGE_TRANSFER_DST_BIT);
2425 MovePtr<Buffer> dstImageBuffer = MovePtr<Buffer>(new Buffer(vk, device, allocator, dstImageBufferInfo, MemoryRequirement::HostVisible));
2426
2427 const VkImageSubresourceRange srcSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, 0u, SINGLE_LEVEL, 0u, SINGLE_LAYER);
2428 const VkImageSubresourceRange dstSubresourceRange = makeImageSubresourceRange(VK_IMAGE_ASPECT_COLOR_BIT, levelNdx, SINGLE_LEVEL, layerNdx, SINGLE_LAYER);
2429
2430 Move<VkImageView> dstImageView (makeImageView(vk, device, dstImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, dstSubresourceRange, m_dstImageViewUsageKHR));
2431
2432 de::MovePtr<Image> srcImage (new Image(vk, device, allocator, srcImageCreateInfo, MemoryRequirement::Any));
2433 Move<VkImageView> srcImageView (makeImageView(vk, device, srcImage->get(), mapImageViewType(m_parameters.imageType), m_parameters.formatUncompressed, srcSubresourceRange, m_srcImageViewUsageKHR));
2434
2435 const VkSamplerCreateInfo srcSamplerInfo (makeSamplerCreateInfo());
2436 const Move<VkSampler> srcSampler = vk::createSampler(vk, device, &srcSamplerInfo);
2437 const VkDescriptorImageInfo descriptorSrcImage (makeDescriptorImageInfo(*srcSampler, *srcImageView, VK_IMAGE_LAYOUT_GENERAL));
2438 const VkDescriptorImageInfo descriptorDstImage (makeDescriptorImageInfo(DE_NULL, *dstImageView, VK_IMAGE_LAYOUT_GENERAL));
2439
2440 const VkBufferImageCopy srcCopyRegion = makeBufferImageCopy(srcImageResolution.x(), srcImageResolution.y(), 0u, 0u);
2441 const VkBufferMemoryBarrier srcCopyBufferBarrierPre = makeBufferMemoryBarrier(VK_ACCESS_HOST_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT, srcImageBuffer->get(), 0ull, srcImageSizeInBytes);
2442 const VkImageMemoryBarrier srcCopyImageBarrierPre = makeImageMemoryBarrier(0u, VK_ACCESS_TRANSFER_WRITE_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, srcImage->get(), srcSubresourceRange);
2443 const VkImageMemoryBarrier srcCopyImageBarrierPost = makeImageMemoryBarrier(VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_GENERAL, srcImage->get(), srcSubresourceRange);
2444 const VkBufferImageCopy dstCopyRegion = makeBufferImageCopy(dstImageResolution.x(), dstImageResolution.y(), levelNdx, layerNdx, dstImageResBlocked.x(), dstImageResBlocked.y());
2445 const VkImageMemoryBarrier dstInitImageBarrier = makeImageMemoryBarrier(0u, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_GENERAL, dstImage->get(), dstSubresourceRange);
2446
2447 const VkExtent2D framebufferSize (makeExtent2D(dstImageResolution[0], dstImageResolution[1]));
2448 const Move<VkFramebuffer> framebuffer (makeFramebuffer(vk, device, *renderPass, 0, DE_NULL, framebufferSize, SINGLE_LAYER));
2449
2450 // Upload source image data
2451 const Allocation& alloc = srcImageBuffer->getAllocation();
2452 deMemcpy(alloc.getHostPtr(), &m_srcData[levelNdx][layerNdx]->at(0), srcImageSizeInBytes);
2453 flushAlloc(vk, device, alloc);
2454
2455 beginCommandBuffer(vk, *cmdBuffer);
2456 vk.cmdBindPipeline(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipeline);
2457
2458 // Copy buffer to image
2459 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_HOST_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1u, &srcCopyBufferBarrierPre, 1u, &srcCopyImageBarrierPre);
2460 vk.cmdCopyBufferToImage(*cmdBuffer, srcImageBuffer->get(), srcImage->get(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &srcCopyRegion);
2461 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &srcCopyImageBarrierPost);
2462
2463 // Define destination image layout
2464 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0u, DE_NULL, 1u, &dstInitImageBarrier);
2465
2466 beginRenderPass(vk, *cmdBuffer, *renderPass, *framebuffer, renderSize);
2467
2468 DescriptorSetUpdateBuilder()
2469 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(0u), VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, &descriptorSrcImage)
2470 .writeSingle(*descriptorSet, DescriptorSetUpdateBuilder::Location::binding(1u), VK_DESCRIPTOR_TYPE_STORAGE_IMAGE, &descriptorDstImage)
2471 .update(vk, device);
2472
2473 vk.cmdBindDescriptorSets(*cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, *pipelineLayout, 0u, 1u, &descriptorSet.get(), 0u, DE_NULL);
2474 vk.cmdBindVertexBuffers(*cmdBuffer, 0u, 1u, &m_vertexBuffer->get(), &m_vertexBufferOffset);
2475
2476 vk.cmdSetViewport(*cmdBuffer, 0u, 1u, &viewport);
2477 vk.cmdSetScissor(*cmdBuffer, 0u, 1u, &scissor);
2478
2479 vk.cmdDraw(*cmdBuffer, (deUint32)m_vertexCount, 1, 0, 0);
2480
2481 endRenderPass(vk, *cmdBuffer);
2482
2483 const VkImageMemoryBarrier prepareForTransferBarrier = makeImageMemoryBarrier(
2484 VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_TRANSFER_READ_BIT,
2485 VK_IMAGE_LAYOUT_GENERAL, VK_IMAGE_LAYOUT_GENERAL,
2486 dstImage->get(), dstSubresourceRange);
2487
2488 const VkBufferMemoryBarrier copyBarrier = makeBufferMemoryBarrier(
2489 VK_ACCESS_TRANSFER_WRITE_BIT, VK_ACCESS_HOST_READ_BIT,
2490 dstImageBuffer->get(), 0ull, dstImageSizeInBytes);
2491
2492 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 0, (const VkBufferMemoryBarrier*)DE_NULL, 1, &prepareForTransferBarrier);
2493 vk.cmdCopyImageToBuffer(*cmdBuffer, dstImage->get(), VK_IMAGE_LAYOUT_GENERAL, dstImageBuffer->get(), 1u, &dstCopyRegion);
2494 vk.cmdPipelineBarrier(*cmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_HOST_BIT, (VkDependencyFlags)0, 0, (const VkMemoryBarrier*)DE_NULL, 1, ©Barrier, 0, (const VkImageMemoryBarrier*)DE_NULL);
2495
2496 endCommandBuffer(vk, *cmdBuffer);
2497
2498 submitCommandsAndWait(vk, device, queue, *cmdBuffer);
2499
2500 const Allocation& dstImageBufferAlloc = dstImageBuffer->getAllocation();
2501 invalidateAlloc(vk, device, dstImageBufferAlloc);
2502 deMemcpy(&m_dstData[levelNdx][layerNdx]->at(0), dstImageBufferAlloc.getHostPtr(), dstImageSizeInBytes);
2503 }
2504 }
2505
2506 m_compressedImage = dstImage;
2507 }
2508
// Test case for VK_KHR_maintenance2 "block texel view compatible" behavior:
// a compressed image is viewed through an uncompressed (texel-view-compatible)
// format and the raw block data observed through both views is compared.
class TexelViewCompatibleCase : public TestCase
{
public:
					TexelViewCompatibleCase		(TestContext&			testCtx,
												 const std::string&		name,
												 const std::string&		desc,
												 const TestParameters&	parameters);
	// Emits the compute or graphics shaders matching m_parameters.shader/operation.
	void			initPrograms				(SourceCollections&		programCollection) const;
	// Checks required features/extensions and creates the matching TestInstance.
	TestInstance*	createInstance				(Context&				context) const;
protected:
	const TestParameters	m_parameters;		// Immutable configuration for this case.
};
2521
// Constructor only stores the parameters; all validation and resource work is
// deferred to initPrograms()/createInstance().
TexelViewCompatibleCase::TexelViewCompatibleCase (TestContext& testCtx, const std::string& name, const std::string& desc, const TestParameters& parameters)
	: TestCase		(testCtx, name, desc)
	, m_parameters	(parameters)
{
}
2527
// Generates GLSL sources for the selected shader stage and operation.
// Compute variants emit "comp" (the transcode operation) plus "decompress"
// (samples both the result and the reference compressed images and stores the
// decompressed texels for comparison). Fragment variants emit "vert", "frag"
// and "frag_verify". The shader text is selected per image dimensionality via
// imageTypeIndex, so the emitted strings must stay byte-exact.
void TexelViewCompatibleCase::initPrograms (vk::SourceCollections& programCollection) const
{
	DE_ASSERT(m_parameters.size.x() > 0);
	DE_ASSERT(m_parameters.size.y() > 0);

	// Snippet selector: 0 = IMAGE_TYPE_1D, 1 = IMAGE_TYPE_2D, 2 = IMAGE_TYPE_3D.
	// (Other image types fall to index 0; the test tree below only registers 1D/2D/3D.)
	const unsigned int imageTypeIndex =
		(m_parameters.imageType == IMAGE_TYPE_2D) +
		(m_parameters.imageType == IMAGE_TYPE_3D) * 2;

	switch (m_parameters.shader)
	{
		case SHADER_TYPE_COMPUTE:
		{
			const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), m_parameters.imageType);
			const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));
			std::ostringstream	src;
			std::ostringstream	src_decompress;

			// Shared preamble (version + 1x1x1 workgroup); one invocation per texel.
			src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n"
				<< "layout (local_size_x = 1, local_size_y = 1, local_size_z = 1) in;\n\n";
			src_decompress << src.str();

			switch(m_parameters.operation)
			{
				// Plain image copy: imageLoad from view 0, imageStore to view 1.
				case OPERATION_IMAGE_LOAD:
				{
					const char* posDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						"	highp int pos = int(gl_GlobalInvocationID.x);\n",
						// IMAGE_TYPE_2D
						"	ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n",
						// IMAGE_TYPE_3D
						"	ivec3 pos = ivec3(gl_GlobalInvocationID);\n",
					};

					src << "layout (binding = 0, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< posDefinitions[imageTypeIndex]
						<< "	imageStore(u_image1, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				// texelFetch from a sampled view; pos.z (gl_GlobalInvocationID.z)
				// is passed as the texelFetch LOD argument.
				case OPERATION_TEXEL_FETCH:
				{
					const char* storeDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						"	imageStore(u_image1, pos.x, texelFetch(u_image0, pos.x, pos.z));\n",
						// IMAGE_TYPE_2D
						"	imageStore(u_image1, pos.xy, texelFetch(u_image0, pos.xy, pos.z));\n",
						// IMAGE_TYPE_3D
						"	imageStore(u_image1, pos, texelFetch(u_image0, pos, pos.z));\n",
					};

					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< "	ivec3 pos = ivec3(gl_GlobalInvocationID.xyz);\n"
						<< storeDefinitions[imageTypeIndex]
						<< "}\n";

					break;
				}

				// Filtered texture() read; normalized coords derived from the
				// invocation id and the dispatch size.
				case OPERATION_TEXTURE:
				{
					const char* coordDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						"	const int	pos = int(gl_GlobalInvocationID.x);\n"
						"	const float	coord = float(gl_GlobalInvocationID.x) / pixels_resolution.x;\n",
						// IMAGE_TYPE_2D
						"	const ivec2	pos = ivec2(gl_GlobalInvocationID.xy);\n"
						"	const vec2	coord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n",
						// IMAGE_TYPE_3D
						"	const ivec3	pos = ivec3(gl_GlobalInvocationID.xy, 0);\n"
						"	const vec2	v2 = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
						"	const vec3	coord = vec3(v2, 0.0);\n",
					};

					src << "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image1;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						// Max texel index (dispatch size - 1) so coord reaches 1.0 at the last texel.
						<< "	const vec2 pixels_resolution = vec2(gl_NumWorkGroups.x - 1, gl_NumWorkGroups.y - 1);\n"
						<< coordDefinitions[imageTypeIndex]
						<< "	imageStore(u_image1, pos, texture(u_image0, coord));\n"
						<< "}\n";

					break;
				}

				// Store path: u_image1 -> u_image0 (the view under test) -> u_image2,
				// exercising both a write to and a read back from the tested view.
				case OPERATION_IMAGE_STORE:
				{
					const char* posDefinitions[3] =
					{
						// IMAGE_TYPE_1D
						"	highp int pos = int(gl_GlobalInvocationID.x);\n",
						// IMAGE_TYPE_2D
						"	ivec2 pos = ivec2(gl_GlobalInvocationID.xy);\n",
						// IMAGE_TYPE_3D
						"	ivec3 pos = ivec3(gl_GlobalInvocationID);\n",
					};

					src << "layout (binding = 0, "<<formatQualifierStr<<") uniform "<<imageTypeStr<<" u_image0;\n"
						<< "layout (binding = 1, "<<formatQualifierStr<<") readonly uniform "<<imageTypeStr<<" u_image1;\n"
						<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" u_image2;\n\n"
						<< "void main (void)\n"
						<< "{\n"
						<< posDefinitions[imageTypeIndex]
						<< "	imageStore(u_image0, pos, imageLoad(u_image1, pos));\n"
						<< "	imageStore(u_image2, pos, imageLoad(u_image0, pos));\n"
						<< "}\n";

					break;
				}

				default:
					DE_ASSERT(false);
			}

			// Decompression/verification shader shared by all compute operations:
			// samples result and reference compressed images and stores the
			// decompressed texels into two writable images for later comparison.
			const char* cordDefinitions[3] =
			{
				// IMAGE_TYPE_1D
				"	const highp float	cord = float(gl_GlobalInvocationID.x) / pixels_resolution.x;\n"
				"	const highp int		pos = int(gl_GlobalInvocationID.x); \n",
				// IMAGE_TYPE_2D
				"	const vec2			cord = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
				"	const ivec2			pos = ivec2(gl_GlobalInvocationID.xy); \n",
				// IMAGE_TYPE_3D
				"	const vec2			v2 = vec2(gl_GlobalInvocationID.xy) / vec2(pixels_resolution);\n"
				"	const vec3			cord = vec3(v2, 0.0);\n"
				"	const ivec3			pos = ivec3(gl_GlobalInvocationID); \n",
			};
			src_decompress	<< "layout (binding = 0) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_result;\n"
							<< "layout (binding = 1) uniform "<<getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(m_parameters.imageType))<<" compressed_reference;\n"
							<< "layout (binding = 2, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_result;\n"
							<< "layout (binding = 3, "<<formatQualifierStr<<") writeonly uniform "<<imageTypeStr<<" decompressed_reference;\n\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "	const vec2 pixels_resolution = vec2(gl_NumWorkGroups.xy);\n"
							<< cordDefinitions[imageTypeIndex]
							<< "	imageStore(decompressed_result, pos, texture(compressed_result, cord));\n"
							<< "	imageStore(decompressed_reference, pos, texture(compressed_reference, cord));\n"
							<< "}\n";
			programCollection.glslSources.add("comp") << glu::ComputeSource(src.str());
			programCollection.glslSources.add("decompress") << glu::ComputeSource(src_decompress.str());

			break;
		}

		case SHADER_TYPE_FRAGMENT:
		{
			// Per-layer rendering: 2D array images are sampled one layer at a
			// time through a 2D view, so the shader sees a plain 2D image type.
			ImageType	imageTypeForFS	= (m_parameters.imageType == IMAGE_TYPE_2D_ARRAY) ? IMAGE_TYPE_2D : m_parameters.imageType;

			// Vertex shader: pass-through position only.
			{
				std::ostringstream src;
				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout(location = 0) in vec4 v_in_position;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< "	gl_Position = v_in_position;\n"
					<< "}\n";

				programCollection.glslSources.add("vert") << glu::VertexSource(src.str());
			}

			// Fragment shader
			{
				switch(m_parameters.operation)
				{
					// Subpass-load from an input attachment and write the
					// value (converted to the destination type) to the color output.
					case OPERATION_ATTACHMENT_READ:
					case OPERATION_ATTACHMENT_WRITE:
					{
						std::ostringstream	src;

						const std::string	dstTypeStr	= getGlslFormatType(m_parameters.formatUncompressed);
						const std::string	srcTypeStr	= getGlslInputFormatType(m_parameters.formatUncompressed);

						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "precision highp int;\n"
							<< "precision highp float;\n"
							<< "\n"
							<< "layout (location = 0) out highp " << dstTypeStr << " o_color;\n"
							<< "layout (input_attachment_index = 0, set = 0, binding = 0) uniform highp " << srcTypeStr << " inputImage1;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< "	o_color = " << dstTypeStr << "(subpassLoad(inputImage1));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					// Sample the input texture at normalized gl_FragCoord and
					// imageStore the result into the output image.
					case OPERATION_TEXTURE_READ:
					case OPERATION_TEXTURE_WRITE:
					{
						std::ostringstream	src;

						const std::string	srcSamplerTypeStr		= getGlslSamplerType(mapVkFormat(m_parameters.formatUncompressed), mapImageViewType(imageTypeForFS));
						const std::string	dstImageTypeStr			= getShaderImageType(mapVkFormat(m_parameters.formatUncompressed), imageTypeForFS);
						const std::string	dstFormatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatUncompressed));

						const char* inDefinitions[3] =
						{
							// IMAGE_TYPE_1D
							"	const highp int		out_pos = int(gl_FragCoord.x);\n"
							"	const highp int		pixels_resolution = textureSize(u_imageIn, 0) - 1;\n"
							"	const highp float	in_pos = float(out_pos) / pixels_resolution;\n",
							// IMAGE_TYPE_2D
							"	const ivec2	out_pos = ivec2(gl_FragCoord.xy);\n"
							"	const ivec2	pixels_resolution = ivec2(textureSize(u_imageIn, 0)) - ivec2(1,1);\n"
							"	const vec2	in_pos = vec2(out_pos) / vec2(pixels_resolution);\n",
							// IMAGE_TYPE_3D
							"	const ivec3	out_pos = ivec3(gl_FragCoord.xy, 0);\n"
							"	const ivec3	pixels_resolution = ivec3(textureSize(u_imageIn, 0)) - ivec3(1,1,1);\n"
							"	const vec3	in_pos = vec3(out_pos) / vec3(pixels_resolution.xy, 1.0);\n",
						};

						src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
							<< "layout (binding = 0) uniform " << srcSamplerTypeStr << " u_imageIn;\n"
							<< "layout (binding = 1, " << dstFormatQualifierStr << ") writeonly uniform " << dstImageTypeStr << " u_imageOut;\n"
							<< "\n"
							<< "void main (void)\n"
							<< "{\n"
							<< inDefinitions[imageTypeIndex]
							<< "	imageStore(u_imageOut, out_pos, texture(u_imageIn, in_pos));\n"
							<< "}\n";

						programCollection.glslSources.add("frag") << glu::FragmentSource(src.str());

						break;
					}

					default:
						DE_ASSERT(false);
				}
			}

			// Verification fragment shader: decompresses result and reference
			// (two samplers) into two storage images for host-side comparison.
			{
				std::ostringstream	src;

				const std::string	samplerType			= getGlslSamplerType(mapVkFormat(m_parameters.formatForVerify), mapImageViewType(imageTypeForFS));
				const std::string	imageTypeStr		= getShaderImageType(mapVkFormat(m_parameters.formatForVerify), imageTypeForFS);
				const std::string	formatQualifierStr	= getShaderImageFormatQualifier(mapVkFormat(m_parameters.formatForVerify));

				const char* pos0Definitions[3] =
				{
					// IMAGE_TYPE_1D
					"	const highp int		out_pos = int(gl_FragCoord.x);\n"
					"	const highp int		pixels_resolution0 = textureSize(u_imageIn0, 0) - 1;\n"
					"	const highp float	in_pos0 = float(out_pos) / pixels_resolution0;\n",
					// IMAGE_TYPE_2D
					"	const ivec2	out_pos = ivec2(gl_FragCoord.xy);\n"
					"	const ivec2	pixels_resolution0 = ivec2(textureSize(u_imageIn0, 0)) - ivec2(1,1);\n"
					"	const vec2	in_pos0 = vec2(out_pos) / vec2(pixels_resolution0);\n",
					// IMAGE_TYPE_3D
					"	const ivec3	out_pos = ivec3(ivec2(gl_FragCoord.xy), 0);\n"
					"	const ivec3	pixels_resolution0 = ivec3(textureSize(u_imageIn0, 0)) - ivec3(1,1,1);\n"
					"	const vec3	in_pos0 = vec3(out_pos) / vec3(pixels_resolution0);\n",
				};
				const char* pos1Definitions[3] =
				{
					// IMAGE_TYPE_1D
					"	const highp int		pixels_resolution1 = textureSize(u_imageIn1, 0) - 1;\n"
					"	const highp float	in_pos1 = float(out_pos) / pixels_resolution1;\n",
					// IMAGE_TYPE_2D
					"	const ivec2	pixels_resolution1 = ivec2(textureSize(u_imageIn1, 0)) - ivec2(1,1);\n"
					"	const vec2	in_pos1 = vec2(out_pos) / vec2(pixels_resolution1);\n",
					// IMAGE_TYPE_3D
					"	const ivec3	pixels_resolution1 = ivec3(textureSize(u_imageIn1, 0)) - ivec3(1,1,1);\n"
					"	const vec3	in_pos1 = vec3(out_pos) / vec3(pixels_resolution1);\n",
				};

				src << glu::getGLSLVersionDeclaration(glu::GLSL_VERSION_450) << "\n\n"
					<< "layout (binding = 0) uniform " << samplerType << " u_imageIn0;\n"
					<< "layout (binding = 1) uniform " << samplerType << " u_imageIn1;\n"
					<< "layout (binding = 2, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut0;\n"
					<< "layout (binding = 3, " << formatQualifierStr << ") writeonly uniform " << imageTypeStr << " u_imageOut1;\n"
					<< "\n"
					<< "void main (void)\n"
					<< "{\n"
					<< pos0Definitions[imageTypeIndex]
					<< "	imageStore(u_imageOut0, out_pos, texture(u_imageIn0, in_pos0));\n"
					<< "\n"
					<< pos1Definitions[imageTypeIndex]
					<< "	imageStore(u_imageOut1, out_pos, texture(u_imageIn1, in_pos1));\n"
					<< "}\n";

				programCollection.glslSources.add("frag_verify") << glu::FragmentSource(src.str());
			}

			break;
		}

		default:
			DE_ASSERT(false);
	}
}
2838
createInstance(Context & context) const2839 TestInstance* TexelViewCompatibleCase::createInstance (Context& context) const
2840 {
2841 const VkPhysicalDevice physicalDevice = context.getPhysicalDevice();
2842 const InstanceInterface& vk = context.getInstanceInterface();
2843
2844 if (!m_parameters.useMipmaps)
2845 {
2846 DE_ASSERT(getNumLayers(m_parameters.imageType, m_parameters.size) == 1u);
2847 }
2848
2849 DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).x() > 0u);
2850 DE_ASSERT(getLayerSize(m_parameters.imageType, m_parameters.size).y() > 0u);
2851
2852 if (!isDeviceExtensionSupported(context.getUsedApiVersion(), context.getDeviceExtensions(), "VK_KHR_maintenance2"))
2853 TCU_THROW(NotSupportedError, "Extension VK_KHR_maintenance2 not supported");
2854
2855 {
2856 VkImageFormatProperties imageFormatProperties;
2857
2858 if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatUncompressed,
2859 mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2860 m_parameters.uncompressedImageUsage, 0u, &imageFormatProperties))
2861 TCU_THROW(NotSupportedError, "Operation not supported with this image format");
2862
2863 if (VK_ERROR_FORMAT_NOT_SUPPORTED == vk.getPhysicalDeviceImageFormatProperties(physicalDevice, m_parameters.formatCompressed,
2864 mapImageType(m_parameters.imageType), VK_IMAGE_TILING_OPTIMAL,
2865 VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
2866 VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR | VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT | VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
2867 &imageFormatProperties))
2868 TCU_THROW(NotSupportedError, "Operation not supported with this image format");
2869 }
2870
2871 {
2872 const VkPhysicalDeviceFeatures physicalDeviceFeatures = getPhysicalDeviceFeatures (vk, physicalDevice);
2873
2874 if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_BC1_RGB_UNORM_BLOCK, VK_FORMAT_BC7_SRGB_BLOCK) &&
2875 !physicalDeviceFeatures.textureCompressionBC)
2876 TCU_THROW(NotSupportedError, "textureCompressionBC not supported");
2877
2878 if (deInRange32(m_parameters.formatCompressed, VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, VK_FORMAT_EAC_R11G11_SNORM_BLOCK) &&
2879 !physicalDeviceFeatures.textureCompressionETC2)
2880 TCU_THROW(NotSupportedError, "textureCompressionETC2 not supported");
2881
2882 if (m_parameters.formatIsASTC &&
2883 !physicalDeviceFeatures.textureCompressionASTC_LDR)
2884 TCU_THROW(NotSupportedError, "textureCompressionASTC_LDR not supported");
2885
2886 if ((m_parameters.uncompressedImageUsage & VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT) &&
2887 isStorageImageExtendedFormat(m_parameters.formatUncompressed) &&
2888 !physicalDeviceFeatures.shaderStorageImageExtendedFormats)
2889 TCU_THROW(NotSupportedError, "Storage view format requires shaderStorageImageExtended");
2890 }
2891
2892 switch (m_parameters.shader)
2893 {
2894 case SHADER_TYPE_COMPUTE:
2895 {
2896 switch (m_parameters.operation)
2897 {
2898 case OPERATION_IMAGE_LOAD:
2899 case OPERATION_TEXEL_FETCH:
2900 case OPERATION_TEXTURE:
2901 return new BasicComputeTestInstance(context, m_parameters);
2902 case OPERATION_IMAGE_STORE:
2903 return new ImageStoreComputeTestInstance(context, m_parameters);
2904 default:
2905 TCU_THROW(InternalError, "Impossible");
2906 }
2907 }
2908
2909 case SHADER_TYPE_FRAGMENT:
2910 {
2911 switch (m_parameters.operation)
2912 {
2913 case OPERATION_ATTACHMENT_READ:
2914 case OPERATION_ATTACHMENT_WRITE:
2915 return new GraphicsAttachmentsTestInstance(context, m_parameters);
2916
2917 case OPERATION_TEXTURE_READ:
2918 case OPERATION_TEXTURE_WRITE:
2919 return new GraphicsTextureTestInstance(context, m_parameters);
2920
2921 default:
2922 TCU_THROW(InternalError, "Impossible");
2923 }
2924 }
2925
2926 default:
2927 TCU_THROW(InternalError, "Impossible");
2928 }
2929 }
2930
2931 } // anonymous ns
2932
getUnniceResolution(const VkFormat format,const deUint32 layers)2933 static tcu::UVec3 getUnniceResolution (const VkFormat format, const deUint32 layers)
2934 {
2935 const deUint32 unniceMipmapTextureSize[] = { 1, 1, 1, 8, 22, 48, 117, 275, 604, 208, 611, 274, 1211 };
2936 const deUint32 baseTextureWidth = unniceMipmapTextureSize[getBlockWidth(format)];
2937 const deUint32 baseTextureHeight = unniceMipmapTextureSize[getBlockHeight(format)];
2938 const deUint32 baseTextureWidthLevels = deLog2Floor32(baseTextureWidth);
2939 const deUint32 baseTextureHeightLevels = deLog2Floor32(baseTextureHeight);
2940 const deUint32 widthMultiplier = (baseTextureHeightLevels > baseTextureWidthLevels) ? 1u << (baseTextureHeightLevels - baseTextureWidthLevels) : 1u;
2941 const deUint32 heightMultiplier = (baseTextureWidthLevels > baseTextureHeightLevels) ? 1u << (baseTextureWidthLevels - baseTextureHeightLevels) : 1u;
2942 const deUint32 width = baseTextureWidth * widthMultiplier;
2943 const deUint32 height = baseTextureHeight * heightMultiplier;
2944
2945 // Number of levels should be same on both axises
2946 DE_ASSERT(deLog2Floor32(width) == deLog2Floor32(height));
2947
2948 return tcu::UVec3(width, height, layers);
2949 }
2950
createImageCompressionTranscodingTests(tcu::TestContext & testCtx)2951 tcu::TestCaseGroup* createImageCompressionTranscodingTests (tcu::TestContext& testCtx)
2952 {
2953 struct FormatsArray
2954 {
2955 const VkFormat* formats;
2956 deUint32 count;
2957 };
2958
2959 const bool mipmapness[] =
2960 {
2961 false,
2962 true,
2963 };
2964
2965 const std::string pipelineName[SHADER_TYPE_LAST] =
2966 {
2967 "compute",
2968 "graphic",
2969 };
2970
2971 const std::string mipmanpnessName[DE_LENGTH_OF_ARRAY(mipmapness)] =
2972 {
2973 "basic",
2974 "extended",
2975 };
2976
2977 const std::string operationName[OPERATION_LAST] =
2978 {
2979 "image_load",
2980 "texel_fetch",
2981 "texture",
2982 "image_store",
2983 "attachment_read",
2984 "attachment_write",
2985 "texture_read",
2986 "texture_write",
2987 };
2988
2989 struct ImageTypeName
2990 {
2991 ImageType type;
2992 std::string name;
2993 };
2994 ImageTypeName imageTypes[] =
2995 {
2996 { IMAGE_TYPE_1D, "1d_image" },
2997 { IMAGE_TYPE_2D, "2d_image" },
2998 { IMAGE_TYPE_3D, "3d_image" },
2999 };
3000
3001 const VkImageUsageFlags baseImageUsageFlagSet = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3002 const VkImageUsageFlags compressedImageUsageFlags[OPERATION_LAST] =
3003 {
3004 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), // "image_load"
3005 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texel_fetch"
3006 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture"
3007 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "image_store"
3008 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), // "attachment_read"
3009 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT), // "attachment_write"
3010 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), // "texture_read"
3011 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), // "texture_write"
3012 };
3013
3014 const VkImageUsageFlags compressedImageViewUsageFlags[OPERATION_LAST] =
3015 {
3016 compressedImageUsageFlags[0], //"image_load"
3017 compressedImageUsageFlags[1], //"texel_fetch"
3018 compressedImageUsageFlags[2], //"texture"
3019 compressedImageUsageFlags[3], //"image_store"
3020 compressedImageUsageFlags[4], //"attachment_read"
3021 compressedImageUsageFlags[5] | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, //"attachment_write"
3022 compressedImageUsageFlags[6], //"texture_read"
3023 compressedImageUsageFlags[7], //"texture_write"
3024 };
3025
3026 const VkImageUsageFlags uncompressedImageUsageFlags[OPERATION_LAST] =
3027 {
3028 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT), //"image_load"
3029 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texel_fetch"
3030 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"texture"
3031 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT), //"image_store"
3032 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"attachment_read"
3033 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT), //"attachment_write"
3034 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_read"
3035 baseImageUsageFlagSet | static_cast<VkImageUsageFlagBits>(VK_IMAGE_USAGE_SAMPLED_BIT), //"texture_write"
3036 };
3037
3038 const VkFormat compressedFormats64bit[] =
3039 {
3040 VK_FORMAT_BC1_RGB_UNORM_BLOCK,
3041 VK_FORMAT_BC1_RGB_SRGB_BLOCK,
3042 VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
3043 VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
3044 VK_FORMAT_BC4_UNORM_BLOCK,
3045 VK_FORMAT_BC4_SNORM_BLOCK,
3046 VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
3047 VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
3048 VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
3049 VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
3050 VK_FORMAT_EAC_R11_UNORM_BLOCK,
3051 VK_FORMAT_EAC_R11_SNORM_BLOCK,
3052 };
3053
3054 const VkFormat compressedFormats128bit[] =
3055 {
3056 VK_FORMAT_BC2_UNORM_BLOCK,
3057 VK_FORMAT_BC2_SRGB_BLOCK,
3058 VK_FORMAT_BC3_UNORM_BLOCK,
3059 VK_FORMAT_BC3_SRGB_BLOCK,
3060 VK_FORMAT_BC5_UNORM_BLOCK,
3061 VK_FORMAT_BC5_SNORM_BLOCK,
3062 VK_FORMAT_BC6H_UFLOAT_BLOCK,
3063 VK_FORMAT_BC6H_SFLOAT_BLOCK,
3064 VK_FORMAT_BC7_UNORM_BLOCK,
3065 VK_FORMAT_BC7_SRGB_BLOCK,
3066 VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
3067 VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
3068 VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
3069 VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
3070 VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
3071 VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
3072 VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
3073 VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
3074 VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
3075 VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
3076 VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
3077 VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
3078 VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
3079 VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
3080 VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
3081 VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
3082 VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
3083 VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
3084 VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
3085 VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
3086 VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
3087 VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
3088 VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
3089 VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
3090 VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
3091 VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
3092 VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
3093 VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
3094 VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
3095 VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
3096 VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
3097 VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
3098 };
3099
3100 const VkFormat uncompressedFormats64bit[] =
3101 {
3102 VK_FORMAT_R16G16B16A16_UNORM,
3103 VK_FORMAT_R16G16B16A16_SNORM,
3104 VK_FORMAT_R16G16B16A16_USCALED,
3105 VK_FORMAT_R16G16B16A16_SSCALED,
3106 VK_FORMAT_R16G16B16A16_UINT,
3107 VK_FORMAT_R16G16B16A16_SINT,
3108 //VK_FORMAT_R16G16B16A16_SFLOAT, removed as float views can't preserve NAN/INF/Denorm values
3109 VK_FORMAT_R32G32_UINT,
3110 VK_FORMAT_R32G32_SINT,
3111 //VK_FORMAT_R32G32_SFLOAT, removed as float views can't preserve NAN/INF/Denorm values
3112 //VK_FORMAT_R64_UINT, remove from the test it couldn't be used
3113 //VK_FORMAT_R64_SINT, remove from the test it couldn't be used
3114 //VK_FORMAT_R64_SFLOAT, remove from the test it couldn't be used
3115 };
3116
// 128-bit uncompressed formats usable as texel-view-compatible aliases of the
// 128-bit compressed block formats. As with the 64-bit list, float and
// 64-bit-component formats are excluded (float views cannot preserve
// NaN/Inf/denormal bit patterns; R64G64 formats cannot be used here).
// NOTE: entry order determines the order/names of generated test cases.
const VkFormat uncompressedFormats128bit[] =
{
	VK_FORMAT_R32G32B32A32_UINT,
	VK_FORMAT_R32G32B32A32_SINT,
	//VK_FORMAT_R32G32B32A32_SFLOAT, removed as float views can't preserve NAN/INF/Denorm values
	//VK_FORMAT_R64G64_UINT, remove from the test it couldn't be used
	//VK_FORMAT_R64G64_SINT, remove from the test it couldn't be used
	//VK_FORMAT_R64G64_SFLOAT, remove from the test it couldn't be used
};
3126
// Compressed format sets grouped by block size in bits. Index i of this array
// is paired with index i of formatsUncompressedSets (64-bit blocks with
// 64-bit uncompressed formats, 128-bit with 128-bit), which is enforced by a
// DE_ASSERT on the two arrays' lengths at the point of use.
const FormatsArray formatsCompressedSets[] =
{
	{
		compressedFormats64bit,
		DE_LENGTH_OF_ARRAY(compressedFormats64bit)
	},
	{
		compressedFormats128bit,
		DE_LENGTH_OF_ARRAY(compressedFormats128bit)
	},
};
3138
// Uncompressed format sets grouped by bit width, paired index-for-index with
// formatsCompressedSets above. Floating point formats should not be used in
// these tests as they cannot be relied upon to preserve all possible values
// in the underlying texture data. Refer to the note under the
// 'VkImageViewCreateInfo' section of the specification.
const FormatsArray formatsUncompressedSets[] =
{
	{
		uncompressedFormats64bit,
		DE_LENGTH_OF_ARRAY(uncompressedFormats64bit)
	},
	{
		uncompressedFormats128bit,
		DE_LENGTH_OF_ARRAY(uncompressedFormats128bit)
	},
};
3154
// Compressed and uncompressed sets are paired by index, so both tables must
// have the same number of bitness groups.
DE_ASSERT(DE_LENGTH_OF_ARRAY(formatsCompressedSets) == DE_LENGTH_OF_ARRAY(formatsUncompressedSets));

MovePtr<tcu::TestCaseGroup> texelViewCompatibleTests (new tcu::TestCaseGroup(testCtx, "texel_view_compatible", "Texel view compatible cases"));

// Build the test hierarchy:
//   shader type / mipmapness / image type / operation / compressed format / uncompressed format
for (int shaderType = SHADER_TYPE_COMPUTE; shaderType < SHADER_TYPE_LAST; ++shaderType)
{
	MovePtr<tcu::TestCaseGroup> pipelineTypeGroup (new tcu::TestCaseGroup(testCtx, pipelineName[shaderType].c_str(), ""));

	for (int mipmapTestNdx = 0; mipmapTestNdx < DE_LENGTH_OF_ARRAY(mipmapness); mipmapTestNdx++)
	{
		const bool mipmapTest = mipmapness[mipmapTestNdx];

		// NOTE: "mipmanpness" is a pre-existing typo in the identifier declared
		// above; it cannot be fixed here without touching the declaration.
		MovePtr<tcu::TestCaseGroup> mipmapTypeGroup (new tcu::TestCaseGroup(testCtx, mipmanpnessName[mipmapTestNdx].c_str(), ""));

		for (int imageTypeNdx = 0; imageTypeNdx < DE_LENGTH_OF_ARRAY(imageTypes); imageTypeNdx++)
		{
			MovePtr<tcu::TestCaseGroup> imageTypeGroup (new tcu::TestCaseGroup(testCtx, imageTypes[imageTypeNdx].name.c_str(), ""));
			ImageType imageType = imageTypes[imageTypeNdx].type;

			for (int operationNdx = OPERATION_IMAGE_LOAD; operationNdx < OPERATION_LAST; ++operationNdx)
			{
				// Attachment read/write and texture read/write operations are only
				// generated for the fragment-shader pipeline.
				if (shaderType != SHADER_TYPE_FRAGMENT && deInRange32(operationNdx, OPERATION_ATTACHMENT_READ, OPERATION_TEXTURE_WRITE))
					continue;

				// Image load/store operations are only generated for the compute pipeline.
				if (shaderType != SHADER_TYPE_COMPUTE && deInRange32(operationNdx, OPERATION_IMAGE_LOAD, OPERATION_IMAGE_STORE))
					continue;

				// 3D images are skipped for attachment operations (3D images cannot be
				// used as framebuffer attachments in the way these tests require).
				if (imageType == IMAGE_TYPE_3D && (operationNdx == OPERATION_ATTACHMENT_READ || operationNdx == OPERATION_ATTACHMENT_WRITE))
					continue;

				MovePtr<tcu::TestCaseGroup> imageOperationGroup (new tcu::TestCaseGroup(testCtx, operationName[operationNdx].c_str(), ""));

				// 3D images use depth 3; all other image types use depth 1.
				deUint32 depth = 1u + 2 * (imageType == IMAGE_TYPE_3D);
				// Image store tests use one additional image (3 instead of 2).
				deUint32 imageCount = 2u + (operationNdx == OPERATION_IMAGE_STORE);

				// Iterate through bitness groups (64 bit, 128 bit, etc)
				for (deUint32 formatBitnessGroup = 0; formatBitnessGroup < DE_LENGTH_OF_ARRAY(formatsCompressedSets); ++formatBitnessGroup)
				{
					for (deUint32 formatCompressedNdx = 0; formatCompressedNdx < formatsCompressedSets[formatBitnessGroup].count; ++formatCompressedNdx)
					{
						const VkFormat formatCompressed = formatsCompressedSets[formatBitnessGroup].formats[formatCompressedNdx];
						const std::string compressedFormatGroupName = getFormatShortString(formatCompressed);
						MovePtr<tcu::TestCaseGroup> compressedFormatGroup (new tcu::TestCaseGroup(testCtx, compressedFormatGroupName.c_str(), ""));

						// Pair each compressed format only with uncompressed formats of
						// the same bit width (same formatBitnessGroup index).
						for (deUint32 formatUncompressedNdx = 0; formatUncompressedNdx < formatsUncompressedSets[formatBitnessGroup].count; ++formatUncompressedNdx)
						{
							const VkFormat formatUncompressed = formatsUncompressedSets[formatBitnessGroup].formats[formatUncompressedNdx];
							const std::string uncompressedFormatGroupName = getFormatShortString(formatUncompressed);

							const TestParameters parameters =
							{
								static_cast<Operation>(operationNdx),
								static_cast<ShaderType>(shaderType),
								// Mipmap tests use a deliberately non-round size so multiple
								// mip levels are exercised; otherwise a fixed 64x64(xdepth).
								mipmapTest ? getUnniceResolution(formatCompressed, 1u) : UVec3(64u, 64u, depth),
								1u + 2u * mipmapTest * (imageType != IMAGE_TYPE_3D), // 1 or 3 if mipmapTest is true but image is not 3d
								imageType,
								formatCompressed,
								formatUncompressed,
								imageCount,
								compressedImageUsageFlags[operationNdx],
								compressedImageViewUsageFlags[operationNdx],
								uncompressedImageUsageFlags[operationNdx],
								mipmapTest,
								VK_FORMAT_R8G8B8A8_UNORM,
								FormatIsASTC(formatCompressed)
							};

							compressedFormatGroup->addChild(new TexelViewCompatibleCase(testCtx, uncompressedFormatGroupName, "", parameters));
						}

						imageOperationGroup->addChild(compressedFormatGroup.release());
					}
				}

				imageTypeGroup->addChild(imageOperationGroup.release());
			}

			mipmapTypeGroup->addChild(imageTypeGroup.release());
		}

		pipelineTypeGroup->addChild(mipmapTypeGroup.release());
	}

	texelViewCompatibleTests->addChild(pipelineTypeGroup.release());
}

return texelViewCompatibleTests.release();
}
3243
3244 } // image
3245 } // vkt
3246