1 // Copyright (c) 2017 Google Inc.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #include <sstream>
16 #include <string>
17
18 #include "gmock/gmock.h"
19 #include "test/unit_spirv.h"
20 #include "test/val/val_fixtures.h"
21
22 namespace spvtools {
23 namespace val {
24 namespace {
25
26 using ::testing::HasSubstr;
27 using ::testing::Not;
28
29 using ValidateAtomics = spvtest::ValidateBase<bool>;
30
GenerateShaderCodeImpl(const std::string & body,const std::string & capabilities_and_extensions,const std::string & definitions,const std::string & memory_model,const std::string & execution)31 std::string GenerateShaderCodeImpl(
32 const std::string& body, const std::string& capabilities_and_extensions,
33 const std::string& definitions, const std::string& memory_model,
34 const std::string& execution) {
35 std::ostringstream ss;
36 ss << R"(
37 OpCapability Shader
38 )";
39 ss << capabilities_and_extensions;
40 ss << "OpMemoryModel Logical " << memory_model << "\n";
41 ss << execution;
42 ss << R"(
43 %void = OpTypeVoid
44 %func = OpTypeFunction %void
45 %bool = OpTypeBool
46 %f32 = OpTypeFloat 32
47 %u32 = OpTypeInt 32 0
48 %f32vec4 = OpTypeVector %f32 4
49
50 %f32_0 = OpConstant %f32 0
51 %f32_1 = OpConstant %f32 1
52 %u32_0 = OpConstant %u32 0
53 %u32_1 = OpConstant %u32 1
54 %f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0
55
56 %cross_device = OpConstant %u32 0
57 %device = OpConstant %u32 1
58 %workgroup = OpConstant %u32 2
59 %subgroup = OpConstant %u32 3
60 %invocation = OpConstant %u32 4
61 %queuefamily = OpConstant %u32 5
62
63 %relaxed = OpConstant %u32 0
64 %acquire = OpConstant %u32 2
65 %release = OpConstant %u32 4
66 %acquire_release = OpConstant %u32 8
67 %acquire_and_release = OpConstant %u32 6
68 %sequentially_consistent = OpConstant %u32 16
69 %acquire_release_uniform_workgroup = OpConstant %u32 328
70
71 %f32_ptr = OpTypePointer Workgroup %f32
72 %f32_var = OpVariable %f32_ptr Workgroup
73
74 %u32_ptr = OpTypePointer Workgroup %u32
75 %u32_var = OpVariable %u32_ptr Workgroup
76
77 %f32vec4_ptr = OpTypePointer Workgroup %f32vec4
78 %f32vec4_var = OpVariable %f32vec4_ptr Workgroup
79
80 %f32_ptr_function = OpTypePointer Function %f32
81 )";
82 ss << definitions;
83 ss << R"(
84 %main = OpFunction %void None %func
85 %main_entry = OpLabel
86 )";
87 ss << body;
88 ss << R"(
89 OpReturn
90 OpFunctionEnd)";
91
92 return ss.str();
93 }
94
GenerateShaderCode(const std::string & body,const std::string & capabilities_and_extensions="",const std::string & memory_model="GLSL450")95 std::string GenerateShaderCode(
96 const std::string& body,
97 const std::string& capabilities_and_extensions = "",
98 const std::string& memory_model = "GLSL450") {
99 const std::string execution = R"(
100 OpEntryPoint Fragment %main "main"
101 OpExecutionMode %main OriginUpperLeft
102 )";
103 const std::string defintions = R"(
104 %u64 = OpTypeInt 64 0
105 %s64 = OpTypeInt 64 1
106
107 %u64_1 = OpConstant %u64 1
108 %s64_1 = OpConstant %s64 1
109
110 %u64_ptr = OpTypePointer Workgroup %u64
111 %s64_ptr = OpTypePointer Workgroup %s64
112 %u64_var = OpVariable %u64_ptr Workgroup
113 %s64_var = OpVariable %s64_ptr Workgroup
114 )";
115 return GenerateShaderCodeImpl(
116 body, "OpCapability Int64\n" + capabilities_and_extensions, defintions,
117 memory_model, execution);
118 }
119
GenerateShaderComputeCode(const std::string & body,const std::string & capabilities_and_extensions="",const std::string & memory_model="GLSL450")120 std::string GenerateShaderComputeCode(
121 const std::string& body,
122 const std::string& capabilities_and_extensions = "",
123 const std::string& memory_model = "GLSL450") {
124 const std::string execution = R"(
125 OpEntryPoint GLCompute %main "main"
126 OpExecutionMode %main LocalSize 32 1 1
127 )";
128 const std::string defintions = R"(
129 %u64 = OpTypeInt 64 0
130 %s64 = OpTypeInt 64 1
131
132 %u64_1 = OpConstant %u64 1
133 %s64_1 = OpConstant %s64 1
134
135 %u64_ptr = OpTypePointer Workgroup %u64
136 %s64_ptr = OpTypePointer Workgroup %s64
137 %u64_var = OpVariable %u64_ptr Workgroup
138 %s64_var = OpVariable %s64_ptr Workgroup
139 )";
140 return GenerateShaderCodeImpl(
141 body, "OpCapability Int64\n" + capabilities_and_extensions, defintions,
142 memory_model, execution);
143 }
144
GenerateKernelCode(const std::string & body,const std::string & capabilities_and_extensions="")145 std::string GenerateKernelCode(
146 const std::string& body,
147 const std::string& capabilities_and_extensions = "") {
148 std::ostringstream ss;
149 ss << R"(
150 OpCapability Addresses
151 OpCapability Kernel
152 OpCapability Linkage
153 OpCapability Int64
154 )";
155
156 ss << capabilities_and_extensions;
157 ss << R"(
158 OpMemoryModel Physical32 OpenCL
159 %void = OpTypeVoid
160 %func = OpTypeFunction %void
161 %bool = OpTypeBool
162 %f32 = OpTypeFloat 32
163 %u32 = OpTypeInt 32 0
164 %u64 = OpTypeInt 64 0
165 %f32vec4 = OpTypeVector %f32 4
166
167 %f32_0 = OpConstant %f32 0
168 %f32_1 = OpConstant %f32 1
169 %u32_0 = OpConstant %u32 0
170 %u32_1 = OpConstant %u32 1
171 %u64_1 = OpConstant %u64 1
172 %f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0
173
174 %cross_device = OpConstant %u32 0
175 %device = OpConstant %u32 1
176 %workgroup = OpConstant %u32 2
177 %subgroup = OpConstant %u32 3
178 %invocation = OpConstant %u32 4
179
180 %relaxed = OpConstant %u32 0
181 %acquire = OpConstant %u32 2
182 %release = OpConstant %u32 4
183 %acquire_release = OpConstant %u32 8
184 %acquire_and_release = OpConstant %u32 6
185 %sequentially_consistent = OpConstant %u32 16
186 %acquire_release_uniform_workgroup = OpConstant %u32 328
187 %acquire_release_atomic_counter_workgroup = OpConstant %u32 1288
188
189 %f32_ptr = OpTypePointer Workgroup %f32
190 %f32_var = OpVariable %f32_ptr Workgroup
191
192 %u32_ptr = OpTypePointer Workgroup %u32
193 %u32_var = OpVariable %u32_ptr Workgroup
194
195 %u64_ptr = OpTypePointer Workgroup %u64
196 %u64_var = OpVariable %u64_ptr Workgroup
197
198 %f32vec4_ptr = OpTypePointer Workgroup %f32vec4
199 %f32vec4_var = OpVariable %f32vec4_ptr Workgroup
200
201 %f32_ptr_function = OpTypePointer Function %f32
202 %f32_ptr_uniformconstant = OpTypePointer UniformConstant %f32
203 %f32_uc_var = OpVariable %f32_ptr_uniformconstant UniformConstant
204
205 %f32_ptr_image = OpTypePointer Image %f32
206 %f32_im_var = OpVariable %f32_ptr_image Image
207
208 %main = OpFunction %void None %func
209 %main_entry = OpLabel
210 )";
211
212 ss << body;
213
214 ss << R"(
215 OpReturn
216 OpFunctionEnd)";
217
218 return ss.str();
219 }
220
TEST_F(ValidateAtomics,AtomicLoadShaderSuccess)221 TEST_F(ValidateAtomics, AtomicLoadShaderSuccess) {
222 const std::string body = R"(
223 %val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
224 %val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
225 %val3 = OpAtomicLoad %u64 %u64_var %subgroup %sequentially_consistent
226 )";
227
228 CompileSuccessfully(GenerateShaderCode(body));
229 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
230 }
231
TEST_F(ValidateAtomics,AtomicLoadKernelSuccess)232 TEST_F(ValidateAtomics, AtomicLoadKernelSuccess) {
233 const std::string body = R"(
234 %val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
235 %val2 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
236 %val3 = OpAtomicLoad %u64 %u64_var %subgroup %acquire
237 )";
238
239 CompileSuccessfully(GenerateKernelCode(body));
240 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
241 }
242
TEST_F(ValidateAtomics,AtomicLoadInt32VulkanSuccess)243 TEST_F(ValidateAtomics, AtomicLoadInt32VulkanSuccess) {
244 const std::string body = R"(
245 %val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
246 %val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
247 )";
248
249 CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
250 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
251 }
252
TEST_F(ValidateAtomics,AtomicAddIntVulkanWrongType1)253 TEST_F(ValidateAtomics, AtomicAddIntVulkanWrongType1) {
254 const std::string body = R"(
255 %val1 = OpAtomicIAdd %f32 %f32_var %device %relaxed %f32_1
256 )";
257
258 CompileSuccessfully(GenerateShaderCode(body));
259 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
260 EXPECT_THAT(getDiagnosticString(),
261 HasSubstr("AtomicIAdd: "
262 "expected Result Type to be int scalar type"));
263 }
264
TEST_F(ValidateAtomics,AtomicAddIntVulkanWrongType2)265 TEST_F(ValidateAtomics, AtomicAddIntVulkanWrongType2) {
266 const std::string body = R"(
267 %val1 = OpAtomicIAdd %f32vec4 %f32vec4_var %device %relaxed %f32_1
268 )";
269
270 CompileSuccessfully(GenerateShaderCode(body));
271 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
272 EXPECT_THAT(getDiagnosticString(),
273 HasSubstr("AtomicIAdd: "
274 "expected Result Type to be integer scalar type"));
275 }
276
TEST_F(ValidateAtomics,AtomicAddFloatVulkan)277 TEST_F(ValidateAtomics, AtomicAddFloatVulkan) {
278 const std::string body = R"(
279 %val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
280 )";
281
282 CompileSuccessfully(GenerateShaderCode(body));
283 ASSERT_EQ(SPV_ERROR_INVALID_CAPABILITY, ValidateInstructions());
284 EXPECT_THAT(
285 getDiagnosticString(),
286 HasSubstr("Opcode AtomicFAddEXT requires one of these capabilities: "
287 "AtomicFloat32AddEXT AtomicFloat64AddEXT"));
288 }
289
TEST_F(ValidateAtomics,AtomicAddFloatVulkanWrongType1)290 TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType1) {
291 const std::string body = R"(
292 %val1 = OpAtomicFAddEXT %f32vec4 %f32vec4_var %device %relaxed %f32_1
293 )";
294 const std::string extra = R"(
295 OpCapability AtomicFloat32AddEXT
296 OpExtension "SPV_EXT_shader_atomic_float_add"
297 )";
298
299 CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
300 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
301 EXPECT_THAT(getDiagnosticString(),
302 HasSubstr("AtomicFAddEXT: "
303 "expected Result Type to be float scalar type"));
304 }
305
TEST_F(ValidateAtomics,AtomicAddFloatVulkanWrongType2)306 TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType2) {
307 const std::string body = R"(
308 %val1 = OpAtomicFAddEXT %u32 %u32_var %device %relaxed %u32_1
309 )";
310 const std::string extra = R"(
311 OpCapability AtomicFloat32AddEXT
312 OpExtension "SPV_EXT_shader_atomic_float_add"
313 )";
314
315 CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
316 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
317 EXPECT_THAT(getDiagnosticString(),
318 HasSubstr("AtomicFAddEXT: "
319 "expected Result Type to be float scalar type"));
320 }
321
TEST_F(ValidateAtomics,AtomicAddFloatVulkanWrongType3)322 TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType3) {
323 const std::string body = R"(
324 %val1 = OpAtomicFAddEXT %u64 %u64_var %device %relaxed %u64_1
325 )";
326 const std::string extra = R"(
327 OpCapability AtomicFloat32AddEXT
328 OpExtension "SPV_EXT_shader_atomic_float_add"
329 )";
330
331 CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
332 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
333 EXPECT_THAT(getDiagnosticString(),
334 HasSubstr("AtomicFAddEXT: "
335 "expected Result Type to be float scalar type"));
336 }
337
TEST_F(ValidateAtomics,AtomicAddFloatVulkanWrongCapability)338 TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongCapability) {
339 const std::string body = R"(
340 %val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
341 )";
342 const std::string extra = R"(
343 OpCapability AtomicFloat64AddEXT
344 OpExtension "SPV_EXT_shader_atomic_float_add"
345 )";
346
347 CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
348 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
349 EXPECT_THAT(getDiagnosticString(),
350 HasSubstr("AtomicFAddEXT: float add atomics "
351 "require the AtomicFloat32AddEXT capability"));
352 }
353
TEST_F(ValidateAtomics,AtomicAddFloatVulkanSuccess)354 TEST_F(ValidateAtomics, AtomicAddFloatVulkanSuccess) {
355 const std::string body = R"(
356 %val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
357 )";
358 const std::string extra = R"(
359 OpCapability AtomicFloat32AddEXT
360 OpExtension "SPV_EXT_shader_atomic_float_add"
361 )";
362
363 CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
364 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
365 }
366
TEST_F(ValidateAtomics,AtomicLoadFloatVulkan)367 TEST_F(ValidateAtomics, AtomicLoadFloatVulkan) {
368 const std::string body = R"(
369 %val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
370 %val2 = OpAtomicLoad %f32 %f32_var %workgroup %acquire
371 )";
372
373 CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
374 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
375 }
376
TEST_F(ValidateAtomics,AtomicStoreFloatVulkan)377 TEST_F(ValidateAtomics, AtomicStoreFloatVulkan) {
378 const std::string body = R"(
379 OpAtomicStore %f32_var %device %relaxed %f32_1
380 )";
381
382 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
383 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
384 }
385
TEST_F(ValidateAtomics,AtomicExchangeFloatVulkan)386 TEST_F(ValidateAtomics, AtomicExchangeFloatVulkan) {
387 const std::string body = R"(
388 %val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
389 )";
390
391 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
392 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
393 }
394
TEST_F(ValidateAtomics,AtomicLoadInt64WithCapabilityVulkanSuccess)395 TEST_F(ValidateAtomics, AtomicLoadInt64WithCapabilityVulkanSuccess) {
396 const std::string body = R"(
397 %val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
398 %val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
399 )";
400
401 CompileSuccessfully(
402 GenerateShaderComputeCode(body, "OpCapability Int64Atomics\n"),
403 SPV_ENV_VULKAN_1_0);
404 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
405 }
406
TEST_F(ValidateAtomics,AtomicLoadInt64WithoutCapabilityVulkan)407 TEST_F(ValidateAtomics, AtomicLoadInt64WithoutCapabilityVulkan) {
408 const std::string body = R"(
409 %val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
410 %val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
411 )";
412
413 CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
414 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
415 EXPECT_THAT(getDiagnosticString(),
416 HasSubstr("64-bit atomics require the Int64Atomics capability"));
417 }
418
TEST_F(ValidateAtomics,AtomicStoreOpenCLFunctionPointerStorageTypeSuccess)419 TEST_F(ValidateAtomics, AtomicStoreOpenCLFunctionPointerStorageTypeSuccess) {
420 const std::string body = R"(
421 %f32_var_function = OpVariable %f32_ptr_function Function
422 OpAtomicStore %f32_var_function %device %relaxed %f32_1
423 )";
424
425 CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_OPENCL_1_2);
426 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_OPENCL_1_2));
427 }
428
TEST_F(ValidateAtomics,AtomicStoreVulkanFunctionPointerStorageType)429 TEST_F(ValidateAtomics, AtomicStoreVulkanFunctionPointerStorageType) {
430 const std::string body = R"(
431 %f32_var_function = OpVariable %f32_ptr_function Function
432 OpAtomicStore %f32_var_function %device %relaxed %f32_1
433 )";
434
435 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
436 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
437 EXPECT_THAT(getDiagnosticString(),
438 AnyVUID("VUID-StandaloneSpirv-None-04686"));
439 EXPECT_THAT(
440 getDiagnosticString(),
441 HasSubstr("AtomicStore: Vulkan spec only allows storage classes for "
442 "atomic to be: Uniform, Workgroup, Image, StorageBuffer, or "
443 "PhysicalStorageBuffer."));
444 }
445
TEST_F(ValidateAtomics,AtomicStoreFunctionPointerStorageType)446 TEST_F(ValidateAtomics, AtomicStoreFunctionPointerStorageType) {
447 const std::string body = R"(
448 %f32_var_function = OpVariable %f32_ptr_function Function
449 OpAtomicStore %f32_var_function %device %relaxed %f32_1
450 )";
451
452 CompileSuccessfully(GenerateShaderCode(body));
453 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
454 EXPECT_THAT(getDiagnosticString(),
455 HasSubstr("AtomicStore: Function storage class forbidden when "
456 "the Shader capability is declared."));
457 }
458
459 // TODO(atgoo@github.com): the corresponding check fails Vulkan CTS,
460 // reenable once fixed.
TEST_F(ValidateAtomics,DISABLED_AtomicLoadVulkanSubgroup)461 TEST_F(ValidateAtomics, DISABLED_AtomicLoadVulkanSubgroup) {
462 const std::string body = R"(
463 %val1 = OpAtomicLoad %u32 %u32_var %subgroup %acquire
464 )";
465
466 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
467 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
468 EXPECT_THAT(getDiagnosticString(),
469 HasSubstr("AtomicLoad: in Vulkan environment memory scope is "
470 "limited to Device, Workgroup and Invocation"));
471 }
472
TEST_F(ValidateAtomics,AtomicLoadVulkanRelease)473 TEST_F(ValidateAtomics, AtomicLoadVulkanRelease) {
474 const std::string body = R"(
475 %val1 = OpAtomicLoad %u32 %u32_var %workgroup %release
476 )";
477
478 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
479 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
480 EXPECT_THAT(
481 getDiagnosticString(),
482 HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
483 "Release, AcquireRelease and SequentiallyConsistent"));
484 }
485
TEST_F(ValidateAtomics,AtomicLoadVulkanAcquireRelease)486 TEST_F(ValidateAtomics, AtomicLoadVulkanAcquireRelease) {
487 const std::string body = R"(
488 %val1 = OpAtomicLoad %u32 %u32_var %workgroup %acquire_release
489 )";
490
491 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
492 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
493 EXPECT_THAT(
494 getDiagnosticString(),
495 HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
496 "Release, AcquireRelease and SequentiallyConsistent"));
497 }
498
TEST_F(ValidateAtomics,AtomicLoadVulkanSequentiallyConsistent)499 TEST_F(ValidateAtomics, AtomicLoadVulkanSequentiallyConsistent) {
500 const std::string body = R"(
501 %val1 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
502 )";
503
504 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
505 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
506 EXPECT_THAT(
507 getDiagnosticString(),
508 HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
509 "Release, AcquireRelease and SequentiallyConsistent"));
510 }
511
TEST_F(ValidateAtomics,AtomicLoadShaderFloat)512 TEST_F(ValidateAtomics, AtomicLoadShaderFloat) {
513 const std::string body = R"(
514 %val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
515 )";
516
517 CompileSuccessfully(GenerateShaderCode(body));
518 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
519 }
520
TEST_F(ValidateAtomics,AtomicLoadVulkanInt64)521 TEST_F(ValidateAtomics, AtomicLoadVulkanInt64) {
522 const std::string body = R"(
523 %val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
524 )";
525
526 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
527 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
528 EXPECT_THAT(
529 getDiagnosticString(),
530 HasSubstr(
531 "AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
532 }
533
TEST_F(ValidateAtomics,VK_KHR_shader_atomic_int64Success)534 TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64Success) {
535 const std::string body = R"(
536 %val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
537 %val2 = OpAtomicUMax %u64 %u64_var %device %relaxed %u64_1
538 %val3 = OpAtomicSMin %u64 %u64_var %device %relaxed %u64_1
539 %val4 = OpAtomicSMax %u64 %u64_var %device %relaxed %u64_1
540 %val5 = OpAtomicAnd %u64 %u64_var %device %relaxed %u64_1
541 %val6 = OpAtomicOr %u64 %u64_var %device %relaxed %u64_1
542 %val7 = OpAtomicXor %u64 %u64_var %device %relaxed %u64_1
543 %val8 = OpAtomicIAdd %u64 %u64_var %device %relaxed %u64_1
544 %val9 = OpAtomicExchange %u64 %u64_var %device %relaxed %u64_1
545 %val10 = OpAtomicCompareExchange %u64 %u64_var %device %relaxed %relaxed %u64_1 %u64_1
546
547 %val11 = OpAtomicUMin %s64 %s64_var %device %relaxed %s64_1
548 %val12 = OpAtomicUMax %s64 %s64_var %device %relaxed %s64_1
549 %val13 = OpAtomicSMin %s64 %s64_var %device %relaxed %s64_1
550 %val14 = OpAtomicSMax %s64 %s64_var %device %relaxed %s64_1
551 %val15 = OpAtomicAnd %s64 %s64_var %device %relaxed %s64_1
552 %val16 = OpAtomicOr %s64 %s64_var %device %relaxed %s64_1
553 %val17 = OpAtomicXor %s64 %s64_var %device %relaxed %s64_1
554 %val18 = OpAtomicIAdd %s64 %s64_var %device %relaxed %s64_1
555 %val19 = OpAtomicExchange %s64 %s64_var %device %relaxed %s64_1
556 %val20 = OpAtomicCompareExchange %s64 %s64_var %device %relaxed %relaxed %s64_1 %s64_1
557
558 %val21 = OpAtomicLoad %u64 %u64_var %device %relaxed
559 %val22 = OpAtomicLoad %s64 %s64_var %device %relaxed
560
561 OpAtomicStore %u64_var %device %relaxed %u64_1
562 OpAtomicStore %s64_var %device %relaxed %s64_1
563 )";
564
565 CompileSuccessfully(GenerateShaderCode(body, "OpCapability Int64Atomics\n"),
566 SPV_ENV_VULKAN_1_0);
567 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
568 }
569
TEST_F(ValidateAtomics,VK_KHR_shader_atomic_int64MissingCapability)570 TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64MissingCapability) {
571 const std::string body = R"(
572 %val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
573 )";
574
575 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
576 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
577 EXPECT_THAT(
578 getDiagnosticString(),
579 HasSubstr(
580 "AtomicUMin: 64-bit atomics require the Int64Atomics capability"));
581 }
582
TEST_F(ValidateAtomics,AtomicLoadWrongResultType)583 TEST_F(ValidateAtomics, AtomicLoadWrongResultType) {
584 const std::string body = R"(
585 %val1 = OpAtomicLoad %f32vec4 %f32vec4_var %device %relaxed
586 )";
587
588 CompileSuccessfully(GenerateKernelCode(body));
589 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
590 EXPECT_THAT(getDiagnosticString(),
591 HasSubstr("AtomicLoad: "
592 "expected Result Type to be int or float scalar type"));
593 }
594
TEST_F(ValidateAtomics,AtomicLoadWrongPointerType)595 TEST_F(ValidateAtomics, AtomicLoadWrongPointerType) {
596 const std::string body = R"(
597 %val1 = OpAtomicLoad %f32 %f32_ptr %device %relaxed
598 )";
599
600 CompileSuccessfully(GenerateKernelCode(body));
601 ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
602 EXPECT_THAT(getDiagnosticString(),
603 HasSubstr("Operand 27[%_ptr_Workgroup_float] cannot be a type"));
604 }
605
TEST_F(ValidateAtomics,AtomicLoadWrongPointerDataType)606 TEST_F(ValidateAtomics, AtomicLoadWrongPointerDataType) {
607 const std::string body = R"(
608 %val1 = OpAtomicLoad %u32 %f32_var %device %relaxed
609 )";
610
611 CompileSuccessfully(GenerateKernelCode(body));
612 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
613 EXPECT_THAT(
614 getDiagnosticString(),
615 HasSubstr("AtomicLoad: "
616 "expected Pointer to point to a value of type Result Type"));
617 }
618
TEST_F(ValidateAtomics,AtomicLoadWrongScopeType)619 TEST_F(ValidateAtomics, AtomicLoadWrongScopeType) {
620 const std::string body = R"(
621 %val1 = OpAtomicLoad %f32 %f32_var %f32_1 %relaxed
622 )";
623
624 CompileSuccessfully(GenerateKernelCode(body));
625 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
626 EXPECT_THAT(getDiagnosticString(),
627 HasSubstr("AtomicLoad: expected scope to be a 32-bit int"));
628 }
629
TEST_F(ValidateAtomics,AtomicLoadWrongMemorySemanticsType)630 TEST_F(ValidateAtomics, AtomicLoadWrongMemorySemanticsType) {
631 const std::string body = R"(
632 %val1 = OpAtomicLoad %f32 %f32_var %device %u64_1
633 )";
634
635 CompileSuccessfully(GenerateKernelCode(body));
636 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
637 EXPECT_THAT(
638 getDiagnosticString(),
639 HasSubstr("AtomicLoad: expected Memory Semantics to be a 32-bit int"));
640 }
641
TEST_F(ValidateAtomics,AtomicStoreKernelSuccess)642 TEST_F(ValidateAtomics, AtomicStoreKernelSuccess) {
643 const std::string body = R"(
644 OpAtomicStore %f32_var %device %relaxed %f32_1
645 OpAtomicStore %u32_var %subgroup %release %u32_1
646 )";
647
648 CompileSuccessfully(GenerateKernelCode(body));
649 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
650 }
651
TEST_F(ValidateAtomics,AtomicStoreShaderSuccess)652 TEST_F(ValidateAtomics, AtomicStoreShaderSuccess) {
653 const std::string body = R"(
654 OpAtomicStore %u32_var %device %release %u32_1
655 OpAtomicStore %u32_var %subgroup %sequentially_consistent %u32_1
656 )";
657
658 CompileSuccessfully(GenerateShaderCode(body));
659 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
660 }
661
TEST_F(ValidateAtomics,AtomicStoreVulkanSuccess)662 TEST_F(ValidateAtomics, AtomicStoreVulkanSuccess) {
663 const std::string body = R"(
664 OpAtomicStore %u32_var %device %release %u32_1
665 )";
666
667 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
668 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
669 }
670
TEST_F(ValidateAtomics,AtomicStoreVulkanAcquire)671 TEST_F(ValidateAtomics, AtomicStoreVulkanAcquire) {
672 const std::string body = R"(
673 OpAtomicStore %u32_var %device %acquire %u32_1
674 )";
675
676 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
677 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
678 EXPECT_THAT(
679 getDiagnosticString(),
680 HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
681 "Acquire, AcquireRelease and SequentiallyConsistent"));
682 }
683
TEST_F(ValidateAtomics,AtomicStoreVulkanAcquireRelease)684 TEST_F(ValidateAtomics, AtomicStoreVulkanAcquireRelease) {
685 const std::string body = R"(
686 OpAtomicStore %u32_var %device %acquire_release %u32_1
687 )";
688
689 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
690 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
691 EXPECT_THAT(
692 getDiagnosticString(),
693 HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
694 "Acquire, AcquireRelease and SequentiallyConsistent"));
695 }
696
TEST_F(ValidateAtomics,AtomicStoreVulkanSequentiallyConsistent)697 TEST_F(ValidateAtomics, AtomicStoreVulkanSequentiallyConsistent) {
698 const std::string body = R"(
699 OpAtomicStore %u32_var %device %sequentially_consistent %u32_1
700 )";
701
702 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
703 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
704 EXPECT_THAT(
705 getDiagnosticString(),
706 HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
707 "Acquire, AcquireRelease and SequentiallyConsistent"));
708 }
709
TEST_F(ValidateAtomics,AtomicStoreWrongPointerType)710 TEST_F(ValidateAtomics, AtomicStoreWrongPointerType) {
711 const std::string body = R"(
712 OpAtomicStore %f32_1 %device %relaxed %f32_1
713 )";
714
715 CompileSuccessfully(GenerateKernelCode(body));
716 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
717 EXPECT_THAT(
718 getDiagnosticString(),
719 HasSubstr("AtomicStore: expected Pointer to be of type OpTypePointer"));
720 }
721
TEST_F(ValidateAtomics,AtomicStoreWrongPointerDataType)722 TEST_F(ValidateAtomics, AtomicStoreWrongPointerDataType) {
723 const std::string body = R"(
724 OpAtomicStore %f32vec4_var %device %relaxed %f32_1
725 )";
726
727 CompileSuccessfully(GenerateKernelCode(body));
728 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
729 EXPECT_THAT(
730 getDiagnosticString(),
731 HasSubstr("AtomicStore: "
732 "expected Pointer to be a pointer to int or float scalar "
733 "type"));
734 }
735
TEST_F(ValidateAtomics,AtomicStoreWrongPointerStorageTypeForOpenCL)736 TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageTypeForOpenCL) {
737 const std::string body = R"(
738 OpAtomicStore %f32_im_var %device %relaxed %f32_1
739 )";
740
741 CompileSuccessfully(GenerateKernelCode(body));
742 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_OPENCL_1_2));
743 EXPECT_THAT(
744 getDiagnosticString(),
745 HasSubstr("AtomicStore: storage class must be Function, Workgroup, "
746 "CrossWorkGroup or Generic in the OpenCL environment."));
747 }
748
TEST_F(ValidateAtomics,AtomicStoreWrongPointerStorageType)749 TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageType) {
750 const std::string body = R"(
751 OpAtomicStore %f32_uc_var %device %relaxed %f32_1
752 )";
753
754 CompileSuccessfully(GenerateKernelCode(body));
755 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
756 EXPECT_THAT(getDiagnosticString(),
757 HasSubstr("AtomicStore: storage class forbidden by universal "
758 "validation rules."));
759 }
760
TEST_F(ValidateAtomics,AtomicStoreWrongScopeType)761 TEST_F(ValidateAtomics, AtomicStoreWrongScopeType) {
762 const std::string body = R"(
763 OpAtomicStore %f32_var %f32_1 %relaxed %f32_1
764 )";
765
766 CompileSuccessfully(GenerateKernelCode(body));
767 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
768 EXPECT_THAT(getDiagnosticString(),
769 HasSubstr("AtomicStore: expected scope to be a 32-bit int\n "
770 "OpAtomicStore %28 %float_1 %uint_0_1 %float_1\n"));
771 }
772
TEST_F(ValidateAtomics,AtomicStoreWrongMemorySemanticsType)773 TEST_F(ValidateAtomics, AtomicStoreWrongMemorySemanticsType) {
774 const std::string body = R"(
775 OpAtomicStore %f32_var %device %f32_1 %f32_1
776 )";
777
778 CompileSuccessfully(GenerateKernelCode(body));
779 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
780 EXPECT_THAT(
781 getDiagnosticString(),
782 HasSubstr("AtomicStore: expected Memory Semantics to be a 32-bit int"));
783 }
784
TEST_F(ValidateAtomics,AtomicStoreWrongValueType)785 TEST_F(ValidateAtomics, AtomicStoreWrongValueType) {
786 const std::string body = R"(
787 OpAtomicStore %f32_var %device %relaxed %u32_1
788 )";
789
790 CompileSuccessfully(GenerateKernelCode(body));
791 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
792 EXPECT_THAT(
793 getDiagnosticString(),
794 HasSubstr("AtomicStore: "
795 "expected Value type and the type pointed to by Pointer to "
796 "be the same"));
797 }
798
TEST_F(ValidateAtomics,AtomicExchangeShaderSuccess)799 TEST_F(ValidateAtomics, AtomicExchangeShaderSuccess) {
800 const std::string body = R"(
801 OpAtomicStore %u32_var %device %relaxed %u32_1
802 %val2 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
803 )";
804
805 CompileSuccessfully(GenerateShaderCode(body));
806 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
807 }
808
TEST_F(ValidateAtomics,AtomicExchangeKernelSuccess)809 TEST_F(ValidateAtomics, AtomicExchangeKernelSuccess) {
810 const std::string body = R"(
811 OpAtomicStore %f32_var %device %relaxed %f32_1
812 %val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
813 OpAtomicStore %u32_var %device %relaxed %u32_1
814 %val4 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
815 )";
816
817 CompileSuccessfully(GenerateKernelCode(body));
818 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
819 }
820
TEST_F(ValidateAtomics,AtomicExchangeShaderFloat)821 TEST_F(ValidateAtomics, AtomicExchangeShaderFloat) {
822 const std::string body = R"(
823 OpAtomicStore %f32_var %device %relaxed %f32_1
824 %val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
825 )";
826
827 CompileSuccessfully(GenerateShaderCode(body));
828 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
829 }
830
TEST_F(ValidateAtomics,AtomicExchangeWrongResultType)831 TEST_F(ValidateAtomics, AtomicExchangeWrongResultType) {
832 const std::string body = R"(
833 OpStore %f32vec4_var %f32vec4_0000
834 %val2 = OpAtomicExchange %f32vec4 %f32vec4_var %device %relaxed %f32vec4_0000
835 )";
836
837 CompileSuccessfully(GenerateKernelCode(body));
838 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
839 EXPECT_THAT(getDiagnosticString(),
840 HasSubstr("AtomicExchange: "
841 "expected Result Type to be int or float scalar type"));
842 }
843
TEST_F(ValidateAtomics,AtomicExchangeWrongPointerType)844 TEST_F(ValidateAtomics, AtomicExchangeWrongPointerType) {
845 const std::string body = R"(
846 %val2 = OpAtomicExchange %f32 %f32vec4_ptr %device %relaxed %f32vec4_0000
847 )";
848
849 CompileSuccessfully(GenerateKernelCode(body));
850 ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
851 EXPECT_THAT(getDiagnosticString(),
852 HasSubstr("Operand 33[%_ptr_Workgroup_v4float] cannot be a "
853 "type"));
854 }
855
TEST_F(ValidateAtomics,AtomicExchangeWrongPointerDataType)856 TEST_F(ValidateAtomics, AtomicExchangeWrongPointerDataType) {
857 const std::string body = R"(
858 OpStore %f32vec4_var %f32vec4_0000
859 %val2 = OpAtomicExchange %f32 %f32vec4_var %device %relaxed %f32vec4_0000
860 )";
861
862 CompileSuccessfully(GenerateKernelCode(body));
863 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
864 EXPECT_THAT(
865 getDiagnosticString(),
866 HasSubstr("AtomicExchange: "
867 "expected Pointer to point to a value of type Result Type"));
868 }
869
TEST_F(ValidateAtomics,AtomicExchangeWrongScopeType)870 TEST_F(ValidateAtomics, AtomicExchangeWrongScopeType) {
871 const std::string body = R"(
872 OpAtomicStore %f32_var %device %relaxed %f32_1
873 %val2 = OpAtomicExchange %f32 %f32_var %f32_1 %relaxed %f32_0
874 )";
875
876 CompileSuccessfully(GenerateKernelCode(body));
877 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
878 EXPECT_THAT(getDiagnosticString(),
879 HasSubstr("AtomicExchange: expected scope to be a 32-bit int"));
880 }
881
TEST_F(ValidateAtomics,AtomicExchangeWrongMemorySemanticsType)882 TEST_F(ValidateAtomics, AtomicExchangeWrongMemorySemanticsType) {
883 const std::string body = R"(
884 OpAtomicStore %f32_var %device %relaxed %f32_1
885 %val2 = OpAtomicExchange %f32 %f32_var %device %f32_1 %f32_0
886 )";
887
888 CompileSuccessfully(GenerateKernelCode(body));
889 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
890 EXPECT_THAT(
891 getDiagnosticString(),
892 HasSubstr(
893 "AtomicExchange: expected Memory Semantics to be a 32-bit int"));
894 }
895
TEST_F(ValidateAtomics,AtomicExchangeWrongValueType)896 TEST_F(ValidateAtomics, AtomicExchangeWrongValueType) {
897 const std::string body = R"(
898 OpAtomicStore %f32_var %device %relaxed %f32_1
899 %val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %u32_0
900 )";
901
902 CompileSuccessfully(GenerateKernelCode(body));
903 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
904 EXPECT_THAT(getDiagnosticString(),
905 HasSubstr("AtomicExchange: "
906 "expected Value to be of type Result Type"));
907 }
908
TEST_F(ValidateAtomics,AtomicCompareExchangeShaderSuccess)909 TEST_F(ValidateAtomics, AtomicCompareExchangeShaderSuccess) {
910 const std::string body = R"(
911 OpAtomicStore %u32_var %device %relaxed %u32_1
912 %val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
913 )";
914
915 CompileSuccessfully(GenerateShaderCode(body));
916 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
917 }
918
TEST_F(ValidateAtomics,AtomicCompareExchangeKernelSuccess)919 TEST_F(ValidateAtomics, AtomicCompareExchangeKernelSuccess) {
920 const std::string body = R"(
921 OpAtomicStore %f32_var %device %relaxed %f32_1
922 %val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
923 OpAtomicStore %u32_var %device %relaxed %u32_1
924 %val4 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
925 )";
926
927 CompileSuccessfully(GenerateKernelCode(body));
928 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
929 }
930
TEST_F(ValidateAtomics,AtomicCompareExchangeShaderFloat)931 TEST_F(ValidateAtomics, AtomicCompareExchangeShaderFloat) {
932 const std::string body = R"(
933 OpAtomicStore %f32_var %device %relaxed %f32_1
934 %val1 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
935 )";
936
937 CompileSuccessfully(GenerateShaderCode(body));
938 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
939 EXPECT_THAT(getDiagnosticString(),
940 HasSubstr("AtomicCompareExchange: "
941 "expected Result Type to be int scalar type"));
942 }
943
TEST_F(ValidateAtomics,AtomicCompareExchangeWrongResultType)944 TEST_F(ValidateAtomics, AtomicCompareExchangeWrongResultType) {
945 const std::string body = R"(
946 OpStore %f32vec4_var %f32vec4_0000
947 %val2 = OpAtomicCompareExchange %f32vec4 %f32vec4_var %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
948 )";
949
950 CompileSuccessfully(GenerateKernelCode(body));
951 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
952 EXPECT_THAT(getDiagnosticString(),
953 HasSubstr("AtomicCompareExchange: "
954 "expected Result Type to be int or float scalar type"));
955 }
956
TEST_F(ValidateAtomics,AtomicCompareExchangeWrongPointerType)957 TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerType) {
958 const std::string body = R"(
959 %val2 = OpAtomicCompareExchange %f32 %f32vec4_ptr %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
960 )";
961
962 CompileSuccessfully(GenerateKernelCode(body));
963 ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
964 EXPECT_THAT(getDiagnosticString(),
965 HasSubstr("Operand 33[%_ptr_Workgroup_v4float] cannot be a "
966 "type"));
967 }
968
TEST_F(ValidateAtomics,AtomicCompareExchangeWrongPointerDataType)969 TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerDataType) {
970 const std::string body = R"(
971 OpStore %f32vec4_var %f32vec4_0000
972 %val2 = OpAtomicCompareExchange %f32 %f32vec4_var %device %relaxed %relaxed %f32_0 %f32_1
973 )";
974
975 CompileSuccessfully(GenerateKernelCode(body));
976 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
977 EXPECT_THAT(
978 getDiagnosticString(),
979 HasSubstr("AtomicCompareExchange: "
980 "expected Pointer to point to a value of type Result Type"));
981 }
982
TEST_F(ValidateAtomics,AtomicCompareExchangeWrongScopeType)983 TEST_F(ValidateAtomics, AtomicCompareExchangeWrongScopeType) {
984 const std::string body = R"(
985 OpAtomicStore %f32_var %device %relaxed %f32_1
986 %val2 = OpAtomicCompareExchange %f32 %f32_var %f32_1 %relaxed %relaxed %f32_0 %f32_0
987 )";
988
989 CompileSuccessfully(GenerateKernelCode(body));
990 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
991 EXPECT_THAT(getDiagnosticString(),
992 HasSubstr("AtomicCompareExchange: expected scope to be a 32-bit "
993 "int"));
994 }
995
TEST_F(ValidateAtomics,AtomicCompareExchangeWrongMemorySemanticsType1)996 TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType1) {
997 const std::string body = R"(
998 OpAtomicStore %f32_var %device %relaxed %f32_1
999 %val2 = OpAtomicCompareExchange %f32 %f32_var %device %f32_1 %relaxed %f32_0 %f32_0
1000 )";
1001
1002 CompileSuccessfully(GenerateKernelCode(body));
1003 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1004 EXPECT_THAT(getDiagnosticString(),
1005 HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
1006 "be a 32-bit int"));
1007 }
1008
TEST_F(ValidateAtomics,AtomicCompareExchangeWrongMemorySemanticsType2)1009 TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType2) {
1010 const std::string body = R"(
1011 OpAtomicStore %f32_var %device %relaxed %f32_1
1012 %val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %f32_1 %f32_0 %f32_0
1013 )";
1014
1015 CompileSuccessfully(GenerateKernelCode(body));
1016 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1017 EXPECT_THAT(getDiagnosticString(),
1018 HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
1019 "be a 32-bit int"));
1020 }
1021
TEST_F(ValidateAtomics,AtomicCompareExchangeUnequalRelease)1022 TEST_F(ValidateAtomics, AtomicCompareExchangeUnequalRelease) {
1023 const std::string body = R"(
1024 OpAtomicStore %f32_var %device %relaxed %f32_1
1025 %val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %release %f32_0 %f32_0
1026 )";
1027
1028 CompileSuccessfully(GenerateKernelCode(body));
1029 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1030 EXPECT_THAT(getDiagnosticString(),
1031 HasSubstr("AtomicCompareExchange: Memory Semantics Release and "
1032 "AcquireRelease cannot be used for operand Unequal"));
1033 }
1034
TEST_F(ValidateAtomics,AtomicCompareExchangeWrongValueType)1035 TEST_F(ValidateAtomics, AtomicCompareExchangeWrongValueType) {
1036 const std::string body = R"(
1037 OpAtomicStore %f32_var %device %relaxed %f32_1
1038 %val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %u32_0 %f32_1
1039 )";
1040
1041 CompileSuccessfully(GenerateKernelCode(body));
1042 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1043 EXPECT_THAT(getDiagnosticString(),
1044 HasSubstr("AtomicCompareExchange: "
1045 "expected Value to be of type Result Type"));
1046 }
1047
TEST_F(ValidateAtomics,AtomicCompareExchangeWrongComparatorType)1048 TEST_F(ValidateAtomics, AtomicCompareExchangeWrongComparatorType) {
1049 const std::string body = R"(
1050 OpAtomicStore %f32_var %device %relaxed %f32_1
1051 %val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %u32_1
1052 )";
1053
1054 CompileSuccessfully(GenerateKernelCode(body));
1055 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1056 EXPECT_THAT(getDiagnosticString(),
1057 HasSubstr("AtomicCompareExchange: "
1058 "expected Comparator to be of type Result Type"));
1059 }
1060
TEST_F(ValidateAtomics,AtomicCompareExchangeWeakSuccess)1061 TEST_F(ValidateAtomics, AtomicCompareExchangeWeakSuccess) {
1062 const std::string body = R"(
1063 OpAtomicStore %u32_var %device %relaxed %u32_1
1064 %val4 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
1065 )";
1066
1067 CompileSuccessfully(GenerateKernelCode(body));
1068 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
1069 }
1070
TEST_F(ValidateAtomics,AtomicCompareExchangeWeakWrongResultType)1071 TEST_F(ValidateAtomics, AtomicCompareExchangeWeakWrongResultType) {
1072 const std::string body = R"(
1073 OpAtomicStore %f32_var %device %relaxed %f32_1
1074 %val2 = OpAtomicCompareExchangeWeak %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
1075 )";
1076
1077 CompileSuccessfully(GenerateKernelCode(body));
1078 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1079 EXPECT_THAT(getDiagnosticString(),
1080 HasSubstr("AtomicCompareExchangeWeak: "
1081 "expected Result Type to be int scalar type"));
1082 }
1083
TEST_F(ValidateAtomics,AtomicArithmeticsSuccess)1084 TEST_F(ValidateAtomics, AtomicArithmeticsSuccess) {
1085 const std::string body = R"(
1086 OpAtomicStore %u32_var %device %relaxed %u32_1
1087 %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release
1088 %val2 = OpAtomicIDecrement %u32 %u32_var %device %acquire_release
1089 %val3 = OpAtomicIAdd %u32 %u32_var %device %acquire_release %u32_1
1090 %val4 = OpAtomicISub %u32 %u32_var %device %acquire_release %u32_1
1091 %val5 = OpAtomicUMin %u32 %u32_var %device %acquire_release %u32_1
1092 %val6 = OpAtomicUMax %u32 %u32_var %device %acquire_release %u32_1
1093 %val7 = OpAtomicSMin %u32 %u32_var %device %sequentially_consistent %u32_1
1094 %val8 = OpAtomicSMax %u32 %u32_var %device %sequentially_consistent %u32_1
1095 %val9 = OpAtomicAnd %u32 %u32_var %device %sequentially_consistent %u32_1
1096 %val10 = OpAtomicOr %u32 %u32_var %device %sequentially_consistent %u32_1
1097 %val11 = OpAtomicXor %u32 %u32_var %device %sequentially_consistent %u32_1
1098 )";
1099
1100 CompileSuccessfully(GenerateKernelCode(body));
1101 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
1102 }
1103
TEST_F(ValidateAtomics,AtomicFlagsSuccess)1104 TEST_F(ValidateAtomics, AtomicFlagsSuccess) {
1105 const std::string body = R"(
1106 OpAtomicFlagClear %u32_var %device %release
1107 %val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %relaxed
1108 )";
1109
1110 CompileSuccessfully(GenerateKernelCode(body));
1111 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
1112 }
1113
TEST_F(ValidateAtomics,AtomicFlagTestAndSetWrongResultType)1114 TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongResultType) {
1115 const std::string body = R"(
1116 %val1 = OpAtomicFlagTestAndSet %u32 %u32_var %device %relaxed
1117 )";
1118
1119 CompileSuccessfully(GenerateKernelCode(body));
1120 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1121 EXPECT_THAT(getDiagnosticString(),
1122 HasSubstr("AtomicFlagTestAndSet: "
1123 "expected Result Type to be bool scalar type"));
1124 }
1125
TEST_F(ValidateAtomics,AtomicFlagTestAndSetNotPointer)1126 TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotPointer) {
1127 const std::string body = R"(
1128 %val1 = OpAtomicFlagTestAndSet %bool %u32_1 %device %relaxed
1129 )";
1130
1131 CompileSuccessfully(GenerateKernelCode(body));
1132 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1133 EXPECT_THAT(getDiagnosticString(),
1134 HasSubstr("AtomicFlagTestAndSet: "
1135 "expected Pointer to be of type OpTypePointer"));
1136 }
1137
TEST_F(ValidateAtomics,AtomicFlagTestAndSetNotIntPointer)1138 TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotIntPointer) {
1139 const std::string body = R"(
1140 %val1 = OpAtomicFlagTestAndSet %bool %f32_var %device %relaxed
1141 )";
1142
1143 CompileSuccessfully(GenerateKernelCode(body));
1144 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1145 EXPECT_THAT(
1146 getDiagnosticString(),
1147 HasSubstr("AtomicFlagTestAndSet: "
1148 "expected Pointer to point to a value of 32-bit int type"));
1149 }
1150
TEST_F(ValidateAtomics,AtomicFlagTestAndSetNotInt32Pointer)1151 TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotInt32Pointer) {
1152 const std::string body = R"(
1153 %val1 = OpAtomicFlagTestAndSet %bool %u64_var %device %relaxed
1154 )";
1155
1156 CompileSuccessfully(GenerateKernelCode(body));
1157 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1158 EXPECT_THAT(
1159 getDiagnosticString(),
1160 HasSubstr("AtomicFlagTestAndSet: "
1161 "expected Pointer to point to a value of 32-bit int type"));
1162 }
1163
TEST_F(ValidateAtomics,AtomicFlagTestAndSetWrongScopeType)1164 TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongScopeType) {
1165 const std::string body = R"(
1166 %val1 = OpAtomicFlagTestAndSet %bool %u32_var %u64_1 %relaxed
1167 )";
1168
1169 CompileSuccessfully(GenerateKernelCode(body));
1170 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1171 EXPECT_THAT(
1172 getDiagnosticString(),
1173 HasSubstr("AtomicFlagTestAndSet: expected scope to be a 32-bit int"));
1174 }
1175
TEST_F(ValidateAtomics,AtomicFlagTestAndSetWrongMemorySemanticsType)1176 TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongMemorySemanticsType) {
1177 const std::string body = R"(
1178 %val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %u64_1
1179 )";
1180
1181 CompileSuccessfully(GenerateKernelCode(body));
1182 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1183 EXPECT_THAT(getDiagnosticString(),
1184 HasSubstr("AtomicFlagTestAndSet: "
1185 "expected Memory Semantics to be a 32-bit int"));
1186 }
1187
TEST_F(ValidateAtomics,AtomicFlagClearAcquire)1188 TEST_F(ValidateAtomics, AtomicFlagClearAcquire) {
1189 const std::string body = R"(
1190 OpAtomicFlagClear %u32_var %device %acquire
1191 )";
1192
1193 CompileSuccessfully(GenerateKernelCode(body));
1194 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1195 EXPECT_THAT(getDiagnosticString(),
1196 HasSubstr("Memory Semantics Acquire and AcquireRelease cannot be "
1197 "used with AtomicFlagClear"));
1198 }
1199
TEST_F(ValidateAtomics,AtomicFlagClearNotPointer)1200 TEST_F(ValidateAtomics, AtomicFlagClearNotPointer) {
1201 const std::string body = R"(
1202 OpAtomicFlagClear %u32_1 %device %relaxed
1203 )";
1204
1205 CompileSuccessfully(GenerateKernelCode(body));
1206 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1207 EXPECT_THAT(getDiagnosticString(),
1208 HasSubstr("AtomicFlagClear: "
1209 "expected Pointer to be of type OpTypePointer"));
1210 }
1211
TEST_F(ValidateAtomics,AtomicFlagClearNotIntPointer)1212 TEST_F(ValidateAtomics, AtomicFlagClearNotIntPointer) {
1213 const std::string body = R"(
1214 OpAtomicFlagClear %f32_var %device %relaxed
1215 )";
1216
1217 CompileSuccessfully(GenerateKernelCode(body));
1218 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1219 EXPECT_THAT(
1220 getDiagnosticString(),
1221 HasSubstr("AtomicFlagClear: "
1222 "expected Pointer to point to a value of 32-bit int type"));
1223 }
1224
TEST_F(ValidateAtomics,AtomicFlagClearNotInt32Pointer)1225 TEST_F(ValidateAtomics, AtomicFlagClearNotInt32Pointer) {
1226 const std::string body = R"(
1227 OpAtomicFlagClear %u64_var %device %relaxed
1228 )";
1229
1230 CompileSuccessfully(GenerateKernelCode(body));
1231 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1232 EXPECT_THAT(
1233 getDiagnosticString(),
1234 HasSubstr("AtomicFlagClear: "
1235 "expected Pointer to point to a value of 32-bit int type"));
1236 }
1237
TEST_F(ValidateAtomics,AtomicFlagClearWrongScopeType)1238 TEST_F(ValidateAtomics, AtomicFlagClearWrongScopeType) {
1239 const std::string body = R"(
1240 OpAtomicFlagClear %u32_var %u64_1 %relaxed
1241 )";
1242
1243 CompileSuccessfully(GenerateKernelCode(body));
1244 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1245 EXPECT_THAT(getDiagnosticString(),
1246 HasSubstr("AtomicFlagClear: expected scope to be a 32-bit "
1247 "int\n OpAtomicFlagClear %30 %ulong_1 %uint_0_1\n"));
1248 }
1249
TEST_F(ValidateAtomics,AtomicFlagClearWrongMemorySemanticsType)1250 TEST_F(ValidateAtomics, AtomicFlagClearWrongMemorySemanticsType) {
1251 const std::string body = R"(
1252 OpAtomicFlagClear %u32_var %device %u64_1
1253 )";
1254
1255 CompileSuccessfully(GenerateKernelCode(body));
1256 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1257 EXPECT_THAT(
1258 getDiagnosticString(),
1259 HasSubstr(
1260 "AtomicFlagClear: expected Memory Semantics to be a 32-bit int"));
1261 }
1262
TEST_F(ValidateAtomics,AtomicIIncrementAcquireAndRelease)1263 TEST_F(ValidateAtomics, AtomicIIncrementAcquireAndRelease) {
1264 const std::string body = R"(
1265 OpAtomicStore %u32_var %device %relaxed %u32_1
1266 %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_and_release
1267 )";
1268
1269 CompileSuccessfully(GenerateKernelCode(body));
1270 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1271 EXPECT_THAT(getDiagnosticString(),
1272 HasSubstr("AtomicIIncrement: Memory Semantics can have at most "
1273 "one of the following bits set: Acquire, Release, "
1274 "AcquireRelease or SequentiallyConsistent"));
1275 }
1276
TEST_F(ValidateAtomics,AtomicUniformMemorySemanticsShader)1277 TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsShader) {
1278 const std::string body = R"(
1279 OpAtomicStore %u32_var %device %relaxed %u32_1
1280 %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
1281 )";
1282
1283 CompileSuccessfully(GenerateShaderCode(body));
1284 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
1285 }
1286
TEST_F(ValidateAtomics,AtomicUniformMemorySemanticsKernel)1287 TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsKernel) {
1288 const std::string body = R"(
1289 OpAtomicStore %u32_var %device %relaxed %u32_1
1290 %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
1291 )";
1292
1293 CompileSuccessfully(GenerateKernelCode(body));
1294 ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1295 EXPECT_THAT(getDiagnosticString(),
1296 HasSubstr("AtomicIIncrement: Memory Semantics UniformMemory "
1297 "requires capability Shader"));
1298 }
1299
1300 // Lack of the AtomicStorage capability is intentionally ignored, see
1301 // https://github.com/KhronosGroup/glslang/issues/1618 for the reasoning why.
TEST_F(ValidateAtomics,AtomicCounterMemorySemanticsNoCapability)1302 TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsNoCapability) {
1303 const std::string body = R"(
1304 OpAtomicStore %u32_var %device %relaxed %u32_1
1305 %val1 = OpAtomicIIncrement %u32 %u32_var %device
1306 %acquire_release_atomic_counter_workgroup
1307 )";
1308
1309 CompileSuccessfully(GenerateKernelCode(body));
1310 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
1311 }
1312
TEST_F(ValidateAtomics,AtomicCounterMemorySemanticsWithCapability)1313 TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsWithCapability) {
1314 const std::string body = R"(
1315 OpAtomicStore %u32_var %device %relaxed %u32_1
1316 %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_atomic_counter_workgroup
1317 )";
1318
1319 CompileSuccessfully(GenerateKernelCode(body, "OpCapability AtomicStorage\n"));
1320 ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
1321 }
1322
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicLoad)1323 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicLoad) {
1324 const std::string body = R"(
1325 %ld = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
1326 )";
1327
1328 const std::string extra = R"(
1329 OpCapability VulkanMemoryModelKHR
1330 OpExtension "SPV_KHR_vulkan_memory_model"
1331 )";
1332
1333 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1334 SPV_ENV_UNIVERSAL_1_3);
1335 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1336 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1337 EXPECT_THAT(getDiagnosticString(),
1338 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1339 "used with the VulkanKHR memory model."));
1340 }
1341
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicStore)1342 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicStore) {
1343 const std::string body = R"(
1344 OpAtomicStore %u32_var %workgroup %sequentially_consistent %u32_0
1345 )";
1346
1347 const std::string extra = R"(
1348 OpCapability VulkanMemoryModelKHR
1349 OpExtension "SPV_KHR_vulkan_memory_model"
1350 )";
1351
1352 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1353 SPV_ENV_UNIVERSAL_1_3);
1354 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1355 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1356 EXPECT_THAT(getDiagnosticString(),
1357 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1358 "used with the VulkanKHR memory model."));
1359 }
1360
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicExchange)1361 TEST_F(ValidateAtomics,
1362 VulkanMemoryModelBanSequentiallyConsistentAtomicExchange) {
1363 const std::string body = R"(
1364 %ex = OpAtomicExchange %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1365 )";
1366
1367 const std::string extra = R"(
1368 OpCapability VulkanMemoryModelKHR
1369 OpExtension "SPV_KHR_vulkan_memory_model"
1370 )";
1371
1372 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1373 SPV_ENV_UNIVERSAL_1_3);
1374 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1375 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1376 EXPECT_THAT(getDiagnosticString(),
1377 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1378 "used with the VulkanKHR memory model."));
1379 }
1380
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeEqual)1381 TEST_F(ValidateAtomics,
1382 VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeEqual) {
1383 const std::string body = R"(
1384 %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %sequentially_consistent %relaxed %u32_0 %u32_0
1385 )";
1386
1387 const std::string extra = R"(
1388 OpCapability VulkanMemoryModelKHR
1389 OpExtension "SPV_KHR_vulkan_memory_model"
1390 )";
1391
1392 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1393 SPV_ENV_UNIVERSAL_1_3);
1394 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1395 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1396 EXPECT_THAT(getDiagnosticString(),
1397 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1398 "used with the VulkanKHR memory model."));
1399 }
1400
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeUnequal)1401 TEST_F(ValidateAtomics,
1402 VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeUnequal) {
1403 const std::string body = R"(
1404 %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %relaxed %sequentially_consistent %u32_0 %u32_0
1405 )";
1406
1407 const std::string extra = R"(
1408 OpCapability VulkanMemoryModelKHR
1409 OpExtension "SPV_KHR_vulkan_memory_model"
1410 )";
1411
1412 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1413 SPV_ENV_UNIVERSAL_1_3);
1414 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1415 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1416 EXPECT_THAT(getDiagnosticString(),
1417 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1418 "used with the VulkanKHR memory model."));
1419 }
1420
1421 TEST_F(ValidateAtomics,
1422 VulkanMemoryModelBanSequentiallyConsistentAtomicIIncrement) {
1423 const std::string body = R"(
1424 %inc = OpAtomicIIncrement %u32 %u32_var %workgroup %sequentially_consistent
1425 )";
1426
1427 const std::string extra = R"(
1428 OpCapability VulkanMemoryModelKHR
1429 OpExtension "SPV_KHR_vulkan_memory_model"
1430 )";
1431
1432 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1433 SPV_ENV_UNIVERSAL_1_3);
1434 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1435 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1436 EXPECT_THAT(getDiagnosticString(),
1437 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1438 "used with the VulkanKHR memory model."));
1439 }
1440
1441 TEST_F(ValidateAtomics,
1442 VulkanMemoryModelBanSequentiallyConsistentAtomicIDecrement) {
1443 const std::string body = R"(
1444 %dec = OpAtomicIDecrement %u32 %u32_var %workgroup %sequentially_consistent
1445 )";
1446
1447 const std::string extra = R"(
1448 OpCapability VulkanMemoryModelKHR
1449 OpExtension "SPV_KHR_vulkan_memory_model"
1450 )";
1451
1452 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1453 SPV_ENV_UNIVERSAL_1_3);
1454 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1455 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1456 EXPECT_THAT(getDiagnosticString(),
1457 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1458 "used with the VulkanKHR memory model."));
1459 }
1460
1461 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicIAdd) {
1462 const std::string body = R"(
1463 %add = OpAtomicIAdd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1464 )";
1465
1466 const std::string extra = R"(
1467 OpCapability VulkanMemoryModelKHR
1468 OpExtension "SPV_KHR_vulkan_memory_model"
1469 )";
1470
1471 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1472 SPV_ENV_UNIVERSAL_1_3);
1473 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1474 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1475 EXPECT_THAT(getDiagnosticString(),
1476 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1477 "used with the VulkanKHR memory model."));
1478 }
1479
1480 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicISub) {
1481 const std::string body = R"(
1482 %sub = OpAtomicISub %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1483 )";
1484
1485 const std::string extra = R"(
1486 OpCapability VulkanMemoryModelKHR
1487 OpExtension "SPV_KHR_vulkan_memory_model"
1488 )";
1489
1490 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1491 SPV_ENV_UNIVERSAL_1_3);
1492 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1493 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1494 EXPECT_THAT(getDiagnosticString(),
1495 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1496 "used with the VulkanKHR memory model."));
1497 }
1498
1499 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMin) {
1500 const std::string body = R"(
1501 %min = OpAtomicSMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1502 )";
1503
1504 const std::string extra = R"(
1505 OpCapability VulkanMemoryModelKHR
1506 OpExtension "SPV_KHR_vulkan_memory_model"
1507 )";
1508
1509 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1510 SPV_ENV_UNIVERSAL_1_3);
1511 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1512 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1513 EXPECT_THAT(getDiagnosticString(),
1514 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1515 "used with the VulkanKHR memory model."));
1516 }
1517
1518 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMin) {
1519 const std::string body = R"(
1520 %min = OpAtomicUMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1521 )";
1522
1523 const std::string extra = R"(
1524 OpCapability VulkanMemoryModelKHR
1525 OpExtension "SPV_KHR_vulkan_memory_model"
1526 )";
1527
1528 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1529 SPV_ENV_UNIVERSAL_1_3);
1530 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1531 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1532 EXPECT_THAT(getDiagnosticString(),
1533 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1534 "used with the VulkanKHR memory model."));
1535 }
1536
1537 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMax) {
1538 const std::string body = R"(
1539 %max = OpAtomicSMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1540 )";
1541
1542 const std::string extra = R"(
1543 OpCapability VulkanMemoryModelKHR
1544 OpExtension "SPV_KHR_vulkan_memory_model"
1545 )";
1546
1547 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1548 SPV_ENV_UNIVERSAL_1_3);
1549 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1550 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1551 EXPECT_THAT(getDiagnosticString(),
1552 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1553 "used with the VulkanKHR memory model."));
1554 }
1555
1556 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMax) {
1557 const std::string body = R"(
1558 %max = OpAtomicUMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1559 )";
1560
1561 const std::string extra = R"(
1562 OpCapability VulkanMemoryModelKHR
1563 OpExtension "SPV_KHR_vulkan_memory_model"
1564 )";
1565
1566 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1567 SPV_ENV_UNIVERSAL_1_3);
1568 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1569 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1570 EXPECT_THAT(getDiagnosticString(),
1571 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1572 "used with the VulkanKHR memory model."));
1573 }
1574
1575 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicAnd) {
1576 const std::string body = R"(
1577 %and = OpAtomicAnd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1578 )";
1579
1580 const std::string extra = R"(
1581 OpCapability VulkanMemoryModelKHR
1582 OpExtension "SPV_KHR_vulkan_memory_model"
1583 )";
1584
1585 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1586 SPV_ENV_UNIVERSAL_1_3);
1587 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1588 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1589 EXPECT_THAT(getDiagnosticString(),
1590 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1591 "used with the VulkanKHR memory model."));
1592 }
1593
1594 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicOr) {
1595 const std::string body = R"(
1596 %or = OpAtomicOr %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1597 )";
1598
1599 const std::string extra = R"(
1600 OpCapability VulkanMemoryModelKHR
1601 OpExtension "SPV_KHR_vulkan_memory_model"
1602 )";
1603
1604 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1605 SPV_ENV_UNIVERSAL_1_3);
1606 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1607 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1608 EXPECT_THAT(getDiagnosticString(),
1609 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1610 "used with the VulkanKHR memory model."));
1611 }
1612
1613 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicXor) {
1614 const std::string body = R"(
1615 %xor = OpAtomicXor %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1616 )";
1617
1618 const std::string extra = R"(
1619 OpCapability VulkanMemoryModelKHR
1620 OpExtension "SPV_KHR_vulkan_memory_model"
1621 )";
1622
1623 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1624 SPV_ENV_UNIVERSAL_1_3);
1625 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1626 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1627 EXPECT_THAT(getDiagnosticString(),
1628 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1629 "used with the VulkanKHR memory model."));
1630 }
1631
1632 TEST_F(ValidateAtomics, OutputMemoryKHRRequiresVulkanMemoryModelKHR) {
1633 const std::string text = R"(
1634 OpCapability Shader
1635 OpMemoryModel Logical GLSL450
1636 OpEntryPoint Fragment %1 "func"
1637 OpExecutionMode %1 OriginUpperLeft
1638 %2 = OpTypeVoid
1639 %3 = OpTypeInt 32 0
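; 4100 = OutputMemoryKHR (0x1000) | Release (0x4)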
1640 %semantics = OpConstant %3 4100
1641 %5 = OpTypeFunction %2
1642 %workgroup = OpConstant %3 2
1643 %ptr = OpTypePointer Workgroup %3
1644 %var = OpVariable %ptr Workgroup
1645 %1 = OpFunction %2 None %5
1646 %7 = OpLabel
1647 OpAtomicStore %var %workgroup %semantics %workgroup
1648 OpReturn
1649 OpFunctionEnd
1650 )";
1651
1652 CompileSuccessfully(text);
1653 EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1654 EXPECT_THAT(getDiagnosticString(),
1655 HasSubstr("AtomicStore: Memory Semantics OutputMemoryKHR "
1656 "requires capability VulkanMemoryModelKHR"));
1657 }
1658
1659 TEST_F(ValidateAtomics, MakeAvailableKHRRequiresVulkanMemoryModelKHR) {
1660 const std::string text = R"(
1661 OpCapability Shader
1662 OpMemoryModel Logical GLSL450
1663 OpEntryPoint Fragment %1 "func"
1664 OpExecutionMode %1 OriginUpperLeft
1665 %2 = OpTypeVoid
1666 %3 = OpTypeInt 32 0
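; 8196 = MakeAvailableKHR (0x2000) | Release (0x4)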
1667 %semantics = OpConstant %3 8196
1668 %5 = OpTypeFunction %2
1669 %workgroup = OpConstant %3 2
1670 %ptr = OpTypePointer Workgroup %3
1671 %var = OpVariable %ptr Workgroup
1672 %1 = OpFunction %2 None %5
1673 %7 = OpLabel
1674 OpAtomicStore %var %workgroup %semantics %workgroup
1675 OpReturn
1676 OpFunctionEnd
1677 )";
1678
1679 CompileSuccessfully(text);
1680 EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1681 EXPECT_THAT(getDiagnosticString(),
1682 HasSubstr("AtomicStore: Memory Semantics MakeAvailableKHR "
1683 "requires capability VulkanMemoryModelKHR"));
1684 }
1685
1686 TEST_F(ValidateAtomics, MakeVisibleKHRRequiresVulkanMemoryModelKHR) {
1687 const std::string text = R"(
1688 OpCapability Shader
1689 OpMemoryModel Logical GLSL450
1690 OpEntryPoint Fragment %1 "func"
1691 OpExecutionMode %1 OriginUpperLeft
1692 %2 = OpTypeVoid
1693 %3 = OpTypeInt 32 0
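; 16386 = MakeVisibleKHR (0x4000) | Acquire (0x2)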
1694 %semantics = OpConstant %3 16386
1695 %5 = OpTypeFunction %2
1696 %workgroup = OpConstant %3 2
1697 %ptr = OpTypePointer Workgroup %3
1698 %var = OpVariable %ptr Workgroup
1699 %1 = OpFunction %2 None %5
1700 %7 = OpLabel
1701 %ld = OpAtomicLoad %3 %var %workgroup %semantics
1702 OpReturn
1703 OpFunctionEnd
1704 )";
1705
1706 CompileSuccessfully(text);
1707 EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1708 EXPECT_THAT(getDiagnosticString(),
1709 HasSubstr("AtomicLoad: Memory Semantics MakeVisibleKHR requires "
1710 "capability VulkanMemoryModelKHR"));
1711 }
1712
1713 TEST_F(ValidateAtomics, MakeAvailableKHRRequiresReleaseSemantics) {
1714 const std::string text = R"(
1715 OpCapability Shader
1716 OpCapability VulkanMemoryModelKHR
1717 OpExtension "SPV_KHR_vulkan_memory_model"
1718 OpMemoryModel Logical VulkanKHR
1719 OpEntryPoint Fragment %1 "func"
1720 OpExecutionMode %1 OriginUpperLeft
1721 %2 = OpTypeVoid
1722 %3 = OpTypeInt 32 0
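; 8448 = MakeAvailableKHR (0x2000) | WorkgroupMemory (0x100), but no Release/AcquireRelease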
1723 %semantics = OpConstant %3 8448
1724 %5 = OpTypeFunction %2
1725 %workgroup = OpConstant %3 2
1726 %ptr = OpTypePointer Workgroup %3
1727 %var = OpVariable %ptr Workgroup
1728 %1 = OpFunction %2 None %5
1729 %7 = OpLabel
1730 OpAtomicStore %var %workgroup %semantics %workgroup
1731 OpReturn
1732 OpFunctionEnd
1733 )";
1734
1735 CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
1736 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1737 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1738 EXPECT_THAT(
1739 getDiagnosticString(),
1740 HasSubstr("AtomicStore: MakeAvailableKHR Memory Semantics also requires "
1741 "either Release or AcquireRelease Memory Semantics"));
1742 }
1743
1744 TEST_F(ValidateAtomics, MakeVisibleKHRRequiresAcquireSemantics) {
1745 const std::string text = R"(
1746 OpCapability Shader
1747 OpCapability VulkanMemoryModelKHR
1748 OpExtension "SPV_KHR_vulkan_memory_model"
1749 OpMemoryModel Logical VulkanKHR
1750 OpEntryPoint Fragment %1 "func"
1751 OpExecutionMode %1 OriginUpperLeft
1752 %2 = OpTypeVoid
1753 %3 = OpTypeInt 32 0
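; 16640 = MakeVisibleKHR (0x4000) | WorkgroupMemory (0x100), but no Acquire/AcquireRelease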
1754 %semantics = OpConstant %3 16640
1755 %5 = OpTypeFunction %2
1756 %workgroup = OpConstant %3 2
1757 %ptr = OpTypePointer Workgroup %3
1758 %var = OpVariable %ptr Workgroup
1759 %1 = OpFunction %2 None %5
1760 %7 = OpLabel
1761 %ld = OpAtomicLoad %3 %var %workgroup %semantics
1762 OpReturn
1763 OpFunctionEnd
1764 )";
1765
1766 CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
1767 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1768 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1769 EXPECT_THAT(
1770 getDiagnosticString(),
1771 HasSubstr("AtomicLoad: MakeVisibleKHR Memory Semantics also requires "
1772 "either Acquire or AcquireRelease Memory Semantics"));
1773 }
1774
1775 TEST_F(ValidateAtomics, MakeAvailableKHRRequiresStorageSemantics) {
1776 const std::string text = R"(
1777 OpCapability Shader
1778 OpCapability VulkanMemoryModelKHR
1779 OpExtension "SPV_KHR_vulkan_memory_model"
1780 OpMemoryModel Logical VulkanKHR
1781 OpEntryPoint Fragment %1 "func"
1782 OpExecutionMode %1 OriginUpperLeft
1783 %2 = OpTypeVoid
1784 %3 = OpTypeInt 32 0
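; 8196 = MakeAvailableKHR (0x2000) | Release (0x4), with no storage class bits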
1785 %semantics = OpConstant %3 8196
1786 %5 = OpTypeFunction %2
1787 %workgroup = OpConstant %3 2
1788 %ptr = OpTypePointer Workgroup %3
1789 %var = OpVariable %ptr Workgroup
1790 %1 = OpFunction %2 None %5
1791 %7 = OpLabel
1792 OpAtomicStore %var %workgroup %semantics %workgroup
1793 OpReturn
1794 OpFunctionEnd
1795 )";
1796
1797 CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
1798 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1799 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1800 EXPECT_THAT(
1801 getDiagnosticString(),
1802 HasSubstr(
1803 "AtomicStore: expected Memory Semantics to include a storage class"));
1804 }
1805
1806 TEST_F(ValidateAtomics, MakeVisibleKHRRequiresStorageSemantics) {
1807 const std::string text = R"(
1808 OpCapability Shader
1809 OpCapability VulkanMemoryModelKHR
1810 OpExtension "SPV_KHR_vulkan_memory_model"
1811 OpMemoryModel Logical VulkanKHR
1812 OpEntryPoint Fragment %1 "func"
1813 OpExecutionMode %1 OriginUpperLeft
1814 %2 = OpTypeVoid
1815 %3 = OpTypeInt 32 0
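; 16386 = MakeVisibleKHR (0x4000) | Acquire (0x2), with no storage class bits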
1816 %semantics = OpConstant %3 16386
1817 %5 = OpTypeFunction %2
1818 %workgroup = OpConstant %3 2
1819 %ptr = OpTypePointer Workgroup %3
1820 %var = OpVariable %ptr Workgroup
1821 %1 = OpFunction %2 None %5
1822 %7 = OpLabel
1823 %ld = OpAtomicLoad %3 %var %workgroup %semantics
1824 OpReturn
1825 OpFunctionEnd
1826 )";
1827
1828 CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
1829 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1830 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1831 EXPECT_THAT(
1832 getDiagnosticString(),
1833 HasSubstr(
1834 "AtomicLoad: expected Memory Semantics to include a storage class"));
1835 }
1836
1837 TEST_F(ValidateAtomics, VulkanMemoryModelAllowsQueueFamilyKHR) {
1838 const std::string body = R"(
1839 %val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
1840 )";
1841
1842 const std::string extra = R"(
1843 OpCapability VulkanMemoryModelKHR
1844 OpExtension "SPV_KHR_vulkan_memory_model"
1845 )";
1846
1847 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1848 SPV_ENV_VULKAN_1_1);
1849 EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_1));
1850 }
1851
1852 TEST_F(ValidateAtomics, NonVulkanMemoryModelDisallowsQueueFamilyKHR) {
1853 const std::string body = R"(
1854 %val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
1855 )";
1856
1857 CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_1);
1858 EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_1));
1859 EXPECT_THAT(getDiagnosticString(),
1860 HasSubstr("AtomicAnd: Memory Scope QueueFamilyKHR requires "
1861 "capability VulkanMemoryModelKHR\n %42 = OpAtomicAnd "
1862 "%uint %29 %uint_5 %uint_0_1 %uint_1\n"));
1863 }
1864
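// When the Shader capability is present, Memory Semantics and Scope ids must
// be OpConstant; OpSpecConstant ids are accepted only for Kernel.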
1865 TEST_F(ValidateAtomics, SemanticsSpecConstantShader) {
1866 const std::string spirv = R"(
1867 OpCapability Shader
1868 OpMemoryModel Logical GLSL450
1869 OpEntryPoint Fragment %func "func"
1870 OpExecutionMode %func OriginUpperLeft
1871 %void = OpTypeVoid
1872 %int = OpTypeInt 32 0
1873 %spec_const = OpSpecConstant %int 0
1874 %workgroup = OpConstant %int 2
1875 %ptr_int_workgroup = OpTypePointer Workgroup %int
1876 %var = OpVariable %ptr_int_workgroup Workgroup
1877 %voidfn = OpTypeFunction %void
1878 %func = OpFunction %void None %voidfn
1879 %entry = OpLabel
1880 %ld = OpAtomicLoad %int %var %workgroup %spec_const
1881 OpReturn
1882 OpFunctionEnd
1883 )";
1884
1885 CompileSuccessfully(spirv);
1886 EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1887 EXPECT_THAT(getDiagnosticString(),
1888 HasSubstr("Memory Semantics ids must be OpConstant when Shader "
1889 "capability is present"));
1890 }
1891
1892 TEST_F(ValidateAtomics, SemanticsSpecConstantKernel) {
1893 const std::string spirv = R"(
1894 OpCapability Kernel
1895 OpCapability Linkage
1896 OpMemoryModel Logical OpenCL
1897 %void = OpTypeVoid
1898 %int = OpTypeInt 32 0
1899 %spec_const = OpSpecConstant %int 0
1900 %workgroup = OpConstant %int 2
1901 %ptr_int_workgroup = OpTypePointer Workgroup %int
1902 %var = OpVariable %ptr_int_workgroup Workgroup
1903 %voidfn = OpTypeFunction %void
1904 %func = OpFunction %void None %voidfn
1905 %entry = OpLabel
1906 %ld = OpAtomicLoad %int %var %workgroup %spec_const
1907 OpReturn
1908 OpFunctionEnd
1909 )";
1910
1911 CompileSuccessfully(spirv);
1912 EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
1913 }
1914
1915 TEST_F(ValidateAtomics, ScopeSpecConstantShader) {
1916 const std::string spirv = R"(
1917 OpCapability Shader
1918 OpMemoryModel Logical GLSL450
1919 OpEntryPoint Fragment %func "func"
1920 OpExecutionMode %func OriginUpperLeft
1921 %void = OpTypeVoid
1922 %int = OpTypeInt 32 0
1923 %spec_const = OpSpecConstant %int 0
1924 %relaxed = OpConstant %int 0
1925 %ptr_int_workgroup = OpTypePointer Workgroup %int
1926 %var = OpVariable %ptr_int_workgroup Workgroup
1927 %voidfn = OpTypeFunction %void
1928 %func = OpFunction %void None %voidfn
1929 %entry = OpLabel
1930 %ld = OpAtomicLoad %int %var %spec_const %relaxed
1931 OpReturn
1932 OpFunctionEnd
1933 )";
1934
1935 CompileSuccessfully(spirv);
1936 EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
1937 EXPECT_THAT(
1938 getDiagnosticString(),
1939 HasSubstr(
1940 "Scope ids must be OpConstant when Shader capability is present"));
1941 }
1942
1943 TEST_F(ValidateAtomics, ScopeSpecConstantKernel) {
1944 const std::string spirv = R"(
1945 OpCapability Kernel
1946 OpCapability Linkage
1947 OpMemoryModel Logical OpenCL
1948 %void = OpTypeVoid
1949 %int = OpTypeInt 32 0
1950 %spec_const = OpSpecConstant %int 0
1951 %relaxed = OpConstant %int 0
1952 %ptr_int_workgroup = OpTypePointer Workgroup %int
1953 %var = OpVariable %ptr_int_workgroup Workgroup
1954 %voidfn = OpTypeFunction %void
1955 %func = OpFunction %void None %voidfn
1956 %entry = OpLabel
1957 %ld = OpAtomicLoad %int %var %spec_const %relaxed
1958 OpReturn
1959 OpFunctionEnd
1960 )";
1961
1962 CompileSuccessfully(spirv);
1963 EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
1964 }
1965
1966 TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeBad) {
1967 const std::string body = R"(
1968 %val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
1969 )";
1970
1971 const std::string extra = R"(OpCapability VulkanMemoryModelKHR
1972 OpExtension "SPV_KHR_vulkan_memory_model"
1973 )";
1974
1975 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1976 SPV_ENV_UNIVERSAL_1_3);
1977 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1978 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1979 EXPECT_THAT(
1980 getDiagnosticString(),
1981 HasSubstr("Use of device scope with VulkanKHR memory model requires the "
1982 "VulkanMemoryModelDeviceScopeKHR capability"));
1983 }
1984
1985 TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeGood) {
1986 const std::string body = R"(
1987 %val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
1988 )";
1989
1990 const std::string extra = R"(OpCapability VulkanMemoryModelKHR
1991 OpCapability VulkanMemoryModelDeviceScopeKHR
1992 OpExtension "SPV_KHR_vulkan_memory_model"
1993 )";
1994
1995 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1996 SPV_ENV_UNIVERSAL_1_3);
1997 EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1998 }
1999
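// OpAtomicCompareExchangeWeak is only allowed in SPIR-V 1.3 and earlier.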
2000 TEST_F(ValidateAtomics, CompareExchangeWeakV13ValV14Good) {
2001 const std::string body = R"(
2002 %val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
2003 )";
2004
2005 CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_3);
2006 EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
2007 }
2008
2009 TEST_F(ValidateAtomics, CompareExchangeWeakV14Bad) {
2010 const std::string body = R"(
2011 %val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
2012 )";
2013
2014 CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_4);
2015 EXPECT_EQ(SPV_ERROR_WRONG_VERSION,
2016 ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
2017 EXPECT_THAT(
2018 getDiagnosticString(),
2019 HasSubstr(
2020 "AtomicCompareExchangeWeak requires SPIR-V version 1.3 or earlier"));
2021 }
2022
2023 TEST_F(ValidateAtomics, CompareExchangeVolatileMatch) {
2024 const std::string spirv = R"(
2025 OpCapability Shader
2026 OpCapability VulkanMemoryModelKHR
2027 OpCapability Linkage
2028 OpExtension "SPV_KHR_vulkan_memory_model"
2029 OpMemoryModel Logical VulkanKHR
2030 %void = OpTypeVoid
2031 %int = OpTypeInt 32 0
2032 %int_0 = OpConstant %int 0
2033 %int_1 = OpConstant %int 1
2034 %workgroup = OpConstant %int 2
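; 32768 = Volatile (0x8000)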
2035 %volatile = OpConstant %int 32768
2036 %ptr_wg_int = OpTypePointer Workgroup %int
2037 %wg_var = OpVariable %ptr_wg_int Workgroup
2038 %void_fn = OpTypeFunction %void
2039 %func = OpFunction %void None %void_fn
2040 %entry = OpLabel
2041 %cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %volatile %int_0 %int_1
2042 OpReturn
2043 OpFunctionEnd
2044 )";
2045
2046 CompileSuccessfully(spirv);
2047 EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
2048 }
2049
2050 TEST_F(ValidateAtomics, CompareExchangeVolatileMismatch) {
2051 const std::string spirv = R"(
2052 OpCapability Shader
2053 OpCapability VulkanMemoryModelKHR
2054 OpCapability Linkage
2055 OpExtension "SPV_KHR_vulkan_memory_model"
2056 OpMemoryModel Logical VulkanKHR
2057 %void = OpTypeVoid
2058 %int = OpTypeInt 32 0
2059 %int_0 = OpConstant %int 0
2060 %int_1 = OpConstant %int 1
2061 %workgroup = OpConstant %int 2
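; 32768 = Volatile (0x8000)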
2062 %volatile = OpConstant %int 32768
2063 %non_volatile = OpConstant %int 0
2064 %ptr_wg_int = OpTypePointer Workgroup %int
2065 %wg_var = OpVariable %ptr_wg_int Workgroup
2066 %void_fn = OpTypeFunction %void
2067 %func = OpFunction %void None %void_fn
2068 %entry = OpLabel
2069 %cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %non_volatile %volatile %int_0 %int_1
2070 OpReturn
2071 OpFunctionEnd
2072 )";
2073
2074 CompileSuccessfully(spirv);
2075 EXPECT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
2076 EXPECT_THAT(getDiagnosticString(),
2077 HasSubstr("Volatile mask setting must match for Equal and "
2078 "Unequal memory semantics"));
2079 }
2080
2081 TEST_F(ValidateAtomics, CompareExchangeVolatileMismatchCooperativeMatrix) {
2082 const std::string spirv = R"(
2083 OpCapability Shader
2084 OpCapability VulkanMemoryModelKHR
2085 OpCapability Linkage
2086 OpCapability CooperativeMatrixNV
2087 OpExtension "SPV_KHR_vulkan_memory_model"
2088 OpExtension "SPV_NV_cooperative_matrix"
2089 OpMemoryModel Logical VulkanKHR
2090 %void = OpTypeVoid
2091 %int = OpTypeInt 32 0
2092 %int_0 = OpConstant %int 0
2093 %int_1 = OpConstant %int 1
2094 %workgroup = OpConstant %int 2
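; Both spec constants default to Volatile (0x8000), but they may be
; specialized to other values.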
2095 %volatile = OpSpecConstant %int 32768
2096 %non_volatile = OpSpecConstant %int 32768
2097 %ptr_wg_int = OpTypePointer Workgroup %int
2098 %wg_var = OpVariable %ptr_wg_int Workgroup
2099 %void_fn = OpTypeFunction %void
2100 %func = OpFunction %void None %void_fn
2101 %entry = OpLabel
2102 %cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %non_volatile %int_0 %int_1
2103 OpReturn
2104 OpFunctionEnd
2105 )";
2106
2107   // This validates because the validator cannot evaluate spec constant
2108   // values; the defaults may be overridden at specialization time.
2108 CompileSuccessfully(spirv);
2109 EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
2110 }
2111
2112 TEST_F(ValidateAtomics, VolatileRequiresVulkanMemoryModel) {
2113 const std::string spirv = R"(
2114 OpCapability Shader
2115 OpCapability Linkage
2116 OpMemoryModel Logical GLSL450
2117 %void = OpTypeVoid
2118 %int = OpTypeInt 32 0
2119 %int_0 = OpConstant %int 0
2120 %int_1 = OpConstant %int 1
2121 %workgroup = OpConstant %int 2
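; 32768 = Volatile (0x8000)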
2122 %volatile = OpConstant %int 32768
2123 %ptr_wg_int = OpTypePointer Workgroup %int
2124 %wg_var = OpVariable %ptr_wg_int Workgroup
2125 %void_fn = OpTypeFunction %void
2126 %func = OpFunction %void None %void_fn
2127 %entry = OpLabel
2128 %ld = OpAtomicLoad %int %wg_var %workgroup %volatile
2129 OpReturn
2130 OpFunctionEnd
2131 )";
2132
2133 CompileSuccessfully(spirv);
2134 EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
2135 EXPECT_THAT(getDiagnosticString(),
2136 HasSubstr("Memory Semantics Volatile requires capability "
2137 "VulkanMemoryModelKHR"));
2138 }
2139
2140 TEST_F(ValidateAtomics, CooperativeMatrixSemanticsMustBeConstant) {
2141 const std::string spirv = R"(
2142 OpCapability Shader
2143 OpCapability Linkage
2144 OpCapability CooperativeMatrixNV
2145 OpExtension "SPV_NV_cooperative_matrix"
2146 OpMemoryModel Logical GLSL450
2147 %void = OpTypeVoid
2148 %int = OpTypeInt 32 0
2149 %int_0 = OpConstant %int 0
2150 %int_1 = OpConstant %int 1
2151 %workgroup = OpConstant %int 2
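; OpUndef is not a constant instruction.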
2152 %undef = OpUndef %int
2153 %ptr_wg_int = OpTypePointer Workgroup %int
2154 %wg_var = OpVariable %ptr_wg_int Workgroup
2155 %void_fn = OpTypeFunction %void
2156 %func = OpFunction %void None %void_fn
2157 %entry = OpLabel
2158 %ld = OpAtomicLoad %int %wg_var %workgroup %undef
2159 OpReturn
2160 OpFunctionEnd
2161 )";
2162
2163 CompileSuccessfully(spirv);
2164 EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
2165 EXPECT_THAT(getDiagnosticString(),
2166 HasSubstr("Memory Semantics must be a constant instruction when "
2167 "CooperativeMatrixNV capability is present"));
2168 }
2169
2170 } // namespace
2171 } // namespace val
2172 } // namespace spvtools
2173