// Copyright (c) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <sstream>
#include <string>

#include "gmock/gmock.h"
#include "test/unit_spirv.h"
#include "test/val/val_fixtures.h"

namespace spvtools {
namespace val {
namespace {

using ::testing::HasSubstr;
using ::testing::Not;

using ValidateAtomics = spvtest::ValidateBase<bool>;

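// Assembles a complete SPIR-V module around |body|: the requested
// capabilities and extensions, an "OpMemoryModel Logical <memory_model>"
// declaration, common types and constants, extra |definitions|, and a
// fragment-shader "main" entry point that wraps |body|.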
std::string GenerateShaderCodeImpl(
    const std::string& body, const std::string& capabilities_and_extensions,
    const std::string& definitions, const std::string& memory_model) {
  std::ostringstream ss;
  ss << R"(
OpCapability Shader
)";
  ss << capabilities_and_extensions;
  ss << "OpMemoryModel Logical " << memory_model << "\n";
  ss << R"(
OpEntryPoint Fragment %main "main"
OpExecutionMode %main OriginUpperLeft
%void = OpTypeVoid
%func = OpTypeFunction %void
%bool = OpTypeBool
%f32 = OpTypeFloat 32
%u32 = OpTypeInt 32 0
%f32vec4 = OpTypeVector %f32 4

%f32_0 = OpConstant %f32 0
%f32_1 = OpConstant %f32 1
%u32_0 = OpConstant %u32 0
%u32_1 = OpConstant %u32 1
%f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0

%cross_device = OpConstant %u32 0
%device = OpConstant %u32 1
%workgroup = OpConstant %u32 2
%subgroup = OpConstant %u32 3
%invocation = OpConstant %u32 4
%queuefamily = OpConstant %u32 5

%relaxed = OpConstant %u32 0
%acquire = OpConstant %u32 2
%release = OpConstant %u32 4
%acquire_release = OpConstant %u32 8
%acquire_and_release = OpConstant %u32 6
%sequentially_consistent = OpConstant %u32 16
%acquire_release_uniform_workgroup = OpConstant %u32 328

%f32_ptr = OpTypePointer Workgroup %f32
%f32_var = OpVariable %f32_ptr Workgroup

%u32_ptr = OpTypePointer Workgroup %u32
%u32_var = OpVariable %u32_ptr Workgroup

%f32vec4_ptr = OpTypePointer Workgroup %f32vec4
%f32vec4_var = OpVariable %f32vec4_ptr Workgroup

%f32_ptr_function = OpTypePointer Function %f32
)";
  ss << definitions;
  ss << R"(
%main = OpFunction %void None %func
%main_entry = OpLabel
)";
  ss << body;
  ss << R"(
OpReturn
OpFunctionEnd)";

  return ss.str();
}

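// Shader variant used by most tests: adds the Int64 capability plus 64-bit
// integer types, constants, and Workgroup variables on top of
// GenerateShaderCodeImpl. Defaults to the GLSL450 memory model.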
std::string GenerateShaderCode(
    const std::string& body,
    const std::string& capabilities_and_extensions = "",
    const std::string& memory_model = "GLSL450") {
  const std::string definitions = R"(
%u64 = OpTypeInt 64 0
%s64 = OpTypeInt 64 1

%u64_1 = OpConstant %u64 1
%s64_1 = OpConstant %s64 1

%u64_ptr = OpTypePointer Workgroup %u64
%s64_ptr = OpTypePointer Workgroup %s64
%u64_var = OpVariable %u64_ptr Workgroup
%s64_var = OpVariable %s64_ptr Workgroup
)";
  return GenerateShaderCodeImpl(
      body, "OpCapability Int64\n" + capabilities_and_extensions, definitions,
      memory_model);
}

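// WebGPU variant: requires the Vulkan memory model capabilities and
// extension and uses the VulkanKHR memory model; no extra definitions.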
std::string GenerateWebGPUShaderCode(
    const std::string& body,
    const std::string& capabilities_and_extensions = "") {
  const std::string vulkan_memory_capability = R"(
OpCapability VulkanMemoryModelDeviceScopeKHR
OpCapability VulkanMemoryModelKHR
)";
  const std::string vulkan_memory_extension = R"(
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  return GenerateShaderCodeImpl(body,
                                vulkan_memory_capability +
                                    capabilities_and_extensions +
                                    vulkan_memory_extension,
                                "", "VulkanKHR");
}

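// Kernel (OpenCL-style) variant: Physical32 addressing with the OpenCL
// memory model, plus UniformConstant and 64-bit declarations.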
std::string GenerateKernelCode(
    const std::string& body,
    const std::string& capabilities_and_extensions = "") {
  std::ostringstream ss;
  ss << R"(
OpCapability Addresses
OpCapability Kernel
OpCapability Linkage
OpCapability Int64
)";

  ss << capabilities_and_extensions;
  ss << R"(
OpMemoryModel Physical32 OpenCL
%void = OpTypeVoid
%func = OpTypeFunction %void
%bool = OpTypeBool
%f32 = OpTypeFloat 32
%u32 = OpTypeInt 32 0
%u64 = OpTypeInt 64 0
%f32vec4 = OpTypeVector %f32 4

%f32_0 = OpConstant %f32 0
%f32_1 = OpConstant %f32 1
%u32_0 = OpConstant %u32 0
%u32_1 = OpConstant %u32 1
%u64_1 = OpConstant %u64 1
%f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0

%cross_device = OpConstant %u32 0
%device = OpConstant %u32 1
%workgroup = OpConstant %u32 2
%subgroup = OpConstant %u32 3
%invocation = OpConstant %u32 4

%relaxed = OpConstant %u32 0
%acquire = OpConstant %u32 2
%release = OpConstant %u32 4
%acquire_release = OpConstant %u32 8
%acquire_and_release = OpConstant %u32 6
%sequentially_consistent = OpConstant %u32 16
%acquire_release_uniform_workgroup = OpConstant %u32 328
%acquire_release_atomic_counter_workgroup = OpConstant %u32 1288

%f32_ptr = OpTypePointer Workgroup %f32
%f32_var = OpVariable %f32_ptr Workgroup

%u32_ptr = OpTypePointer Workgroup %u32
%u32_var = OpVariable %u32_ptr Workgroup

%u64_ptr = OpTypePointer Workgroup %u64
%u64_var = OpVariable %u64_ptr Workgroup

%f32vec4_ptr = OpTypePointer Workgroup %f32vec4
%f32vec4_var = OpVariable %f32vec4_ptr Workgroup

%f32_ptr_function = OpTypePointer Function %f32
%f32_ptr_uniformconstant = OpTypePointer UniformConstant %f32
%f32_uc_var = OpVariable %f32_ptr_uniformconstant UniformConstant

%main = OpFunction %void None %func
%main_entry = OpLabel
)";

  ss << body;

  ss << R"(
OpReturn
OpFunctionEnd)";

  return ss.str();
}

TEST_F(ValidateAtomics, AtomicLoadShaderSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
%val3 = OpAtomicLoad %u64 %u64_var %subgroup %sequentially_consistent
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicLoadKernelSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
%val3 = OpAtomicLoad %u64 %u64_var %subgroup %acquire
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicLoadVulkanSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}

TEST_F(ValidateAtomics, AtomicStoreOpenCLFunctionPointerStorageTypeSuccess) {
  const std::string body = R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_OPENCL_1_2);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_OPENCL_1_2));
}

TEST_F(ValidateAtomics, AtomicStoreVulkanFunctionPointerStorageType) {
  const std::string body = R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: expected Pointer Storage Class to be Uniform, "
                "Workgroup, CrossWorkgroup, Generic, AtomicCounter, Image or "
                "StorageBuffer"));
}

// TODO(atgoo@github.com): the corresponding check fails Vulkan CTS,
// reenable once fixed.
TEST_F(ValidateAtomics, DISABLED_AtomicLoadVulkanSubgroup) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %subgroup %acquire
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: in Vulkan environment memory scope is "
                        "limited to Device, Workgroup and Invocation"));
}

TEST_F(ValidateAtomics, AtomicLoadVulkanRelease) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %release
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
                "Release, AcquireRelease and SequentiallyConsistent"));
}

TEST_F(ValidateAtomics, AtomicLoadVulkanAcquireRelease) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %acquire_release
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
                "Release, AcquireRelease and SequentiallyConsistent"));
}

TEST_F(ValidateAtomics, AtomicLoadVulkanSequentiallyConsistent) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
                "Release, AcquireRelease and SequentiallyConsistent"));
}

TEST_F(ValidateAtomics, AtomicLoadShaderFloat) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: "
                        "expected Result Type to be int scalar type"));
}

TEST_F(ValidateAtomics, AtomicLoadVulkanInt64) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
}

TEST_F(ValidateAtomics, AtomicLoadWebGPUShaderSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(body), SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_WEBGPU_0));
}

TEST_F(ValidateAtomics, AtomicLoadWebGPUShaderSequentiallyConsistentFailure) {
  const std::string body = R"(
%val3 = OpAtomicLoad %u32 %u32_var %subgroup %sequentially_consistent
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(body), SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "WebGPU spec disallows any bit masks in Memory Semantics that are "
          "not Acquire, Release, AcquireRelease, UniformMemory, "
          "WorkgroupMemory, ImageMemory, OutputMemoryKHR, MakeAvailableKHR, or "
          "MakeVisibleKHR\n %34 = OpAtomicLoad %uint %29 %uint_3 %uint_16\n"));
}

TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64Success) {
  const std::string body = R"(
%val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
%val2 = OpAtomicUMax %u64 %u64_var %device %relaxed %u64_1
%val3 = OpAtomicSMin %u64 %u64_var %device %relaxed %u64_1
%val4 = OpAtomicSMax %u64 %u64_var %device %relaxed %u64_1
%val5 = OpAtomicAnd %u64 %u64_var %device %relaxed %u64_1
%val6 = OpAtomicOr %u64 %u64_var %device %relaxed %u64_1
%val7 = OpAtomicXor %u64 %u64_var %device %relaxed %u64_1
%val8 = OpAtomicIAdd %u64 %u64_var %device %relaxed %u64_1
%val9 = OpAtomicExchange %u64 %u64_var %device %relaxed %u64_1
%val10 = OpAtomicCompareExchange %u64 %u64_var %device %relaxed %relaxed %u64_1 %u64_1

%val11 = OpAtomicUMin %s64 %s64_var %device %relaxed %s64_1
%val12 = OpAtomicUMax %s64 %s64_var %device %relaxed %s64_1
%val13 = OpAtomicSMin %s64 %s64_var %device %relaxed %s64_1
%val14 = OpAtomicSMax %s64 %s64_var %device %relaxed %s64_1
%val15 = OpAtomicAnd %s64 %s64_var %device %relaxed %s64_1
%val16 = OpAtomicOr %s64 %s64_var %device %relaxed %s64_1
%val17 = OpAtomicXor %s64 %s64_var %device %relaxed %s64_1
%val18 = OpAtomicIAdd %s64 %s64_var %device %relaxed %s64_1
%val19 = OpAtomicExchange %s64 %s64_var %device %relaxed %s64_1
%val20 = OpAtomicCompareExchange %s64 %s64_var %device %relaxed %relaxed %s64_1 %s64_1

%val21 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val22 = OpAtomicLoad %s64 %s64_var %device %relaxed

OpAtomicStore %u64_var %device %relaxed %u64_1
OpAtomicStore %s64_var %device %relaxed %s64_1
)";

  CompileSuccessfully(GenerateShaderCode(body, "OpCapability Int64Atomics\n"),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}

TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64MissingCapability) {
  const std::string body = R"(
%val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicUMin: 64-bit atomics require the Int64Atomics capability"));
}

TEST_F(ValidateAtomics, AtomicLoadWrongResultType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32vec4 %f32vec4_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: "
                        "expected Result Type to be int or float scalar type"));
}

TEST_F(ValidateAtomics, AtomicLoadWrongPointerType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_ptr %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Operand 27[%_ptr_Workgroup_float] cannot be a type"));
}

TEST_F(ValidateAtomics, AtomicLoadWrongPointerDataType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %f32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicLoad: "
                "expected Pointer to point to a value of type Result Type"));
}

TEST_F(ValidateAtomics, AtomicLoadWrongScopeType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %f32_1 %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicLoad: expected Memory Scope to be a 32-bit int\n %40 = "
                "OpAtomicLoad %float %28 %float_1 %uint_0_1\n"));
}

TEST_F(ValidateAtomics, AtomicLoadWrongMemorySemanticsType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %u64_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicLoad: expected Memory Semantics to be a 32-bit int"));
}

TEST_F(ValidateAtomics, AtomicStoreKernelSuccess) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
OpAtomicStore %u32_var %subgroup %release %u32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicStoreShaderSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %release %u32_1
OpAtomicStore %u32_var %subgroup %sequentially_consistent %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicStoreVulkanSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %release %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}

TEST_F(ValidateAtomics, AtomicStoreVulkanAcquire) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %acquire %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
                "Acquire, AcquireRelease and SequentiallyConsistent"));
}

TEST_F(ValidateAtomics, AtomicStoreVulkanAcquireRelease) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %acquire_release %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
                "Acquire, AcquireRelease and SequentiallyConsistent"));
}

TEST_F(ValidateAtomics, AtomicStoreVulkanSequentiallyConsistent) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %sequentially_consistent %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
                "Acquire, AcquireRelease and SequentiallyConsistent"));
}

TEST_F(ValidateAtomics, AtomicStoreWebGPUSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %release %u32_1
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(body), SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_WEBGPU_0));
}

TEST_F(ValidateAtomics, AtomicStoreWebGPUSequentiallyConsistent) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %sequentially_consistent %u32_1
)";

  CompileSuccessfully(GenerateWebGPUShaderCode(body), SPV_ENV_WEBGPU_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_WEBGPU_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "WebGPU spec disallows any bit masks in Memory Semantics that are "
          "not Acquire, Release, AcquireRelease, UniformMemory, "
          "WorkgroupMemory, ImageMemory, OutputMemoryKHR, MakeAvailableKHR, or "
          "MakeVisibleKHR\n OpAtomicStore %29 %uint_1_0 %uint_16 %uint_1\n"));
}

TEST_F(ValidateAtomics, AtomicStoreWrongPointerType) {
  const std::string body = R"(
OpAtomicStore %f32_1 %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: expected Pointer to be of type OpTypePointer"));
}

TEST_F(ValidateAtomics, AtomicStoreWrongPointerDataType) {
  const std::string body = R"(
OpAtomicStore %f32vec4_var %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: "
                "expected Pointer to be a pointer to int or float scalar "
                "type"));
}

TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageType) {
  const std::string body = R"(
OpAtomicStore %f32_uc_var %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: expected Pointer Storage Class to be Uniform, "
                "Workgroup, CrossWorkgroup, Generic, AtomicCounter, Image or "
                "StorageBuffer"));
}

TEST_F(ValidateAtomics, AtomicStoreWrongScopeType) {
  const std::string body = R"(
OpAtomicStore %f32_var %f32_1 %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: expected Memory Scope to be a 32-bit int\n "
                "OpAtomicStore %28 %float_1 %uint_0_1 %float_1\n"));
}

TEST_F(ValidateAtomics, AtomicStoreWrongMemorySemanticsType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %f32_1 %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: expected Memory Semantics to be a 32-bit int"));
}

TEST_F(ValidateAtomics, AtomicStoreWrongValueType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %u32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: "
                "expected Value type and the type pointed to by Pointer to "
                "be the same"));
}

TEST_F(ValidateAtomics, AtomicExchangeShaderSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicExchangeKernelSuccess) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicExchangeShaderFloat) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: "
                        "expected Result Type to be int scalar type"));
}

TEST_F(ValidateAtomics, AtomicExchangeWrongResultType) {
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicExchange %f32vec4 %f32vec4_var %device %relaxed %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: "
                        "expected Result Type to be int or float scalar type"));
}

TEST_F(ValidateAtomics, AtomicExchangeWrongPointerType) {
  const std::string body = R"(
%val2 = OpAtomicExchange %f32 %f32vec4_ptr %device %relaxed %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Operand 33[%_ptr_Workgroup_v4float] cannot be a "
                        "type"));
}

TEST_F(ValidateAtomics, AtomicExchangeWrongPointerDataType) {
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicExchange %f32 %f32vec4_var %device %relaxed %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicExchange: "
                "expected Pointer to point to a value of type Result Type"));
}

TEST_F(ValidateAtomics, AtomicExchangeWrongScopeType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %f32_1 %relaxed %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicExchange: expected Memory Scope to be a 32-bit int\n %40 = "
          "OpAtomicExchange %float %28 %float_1 %uint_0_1 %float_0\n"));
}

TEST_F(ValidateAtomics, AtomicExchangeWrongMemorySemanticsType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %f32_1 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicExchange: expected Memory Semantics to be a 32-bit int"));
}

TEST_F(ValidateAtomics, AtomicExchangeWrongValueType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: "
                        "expected Value to be of type Result Type"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeShaderSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicCompareExchangeKernelSuccess) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicCompareExchangeShaderFloat) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val1 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: "
                        "expected Result Type to be int scalar type"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWrongResultType) {
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicCompareExchange %f32vec4 %f32vec4_var %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: "
                        "expected Result Type to be int or float scalar type"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerType) {
  const std::string body = R"(
%val2 = OpAtomicCompareExchange %f32 %f32vec4_ptr %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Operand 33[%_ptr_Workgroup_v4float] cannot be a "
                        "type"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerDataType) {
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicCompareExchange %f32 %f32vec4_var %device %relaxed %relaxed %f32_0 %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicCompareExchange: "
                "expected Pointer to point to a value of type Result Type"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWrongScopeType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %f32_1 %relaxed %relaxed %f32_0 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicCompareExchange: expected Memory Scope to be a 32-bit "
                "int\n %40 = OpAtomicCompareExchange %float %28 %float_1 "
                "%uint_0_1 %uint_0_1 %float_0 %float_0\n"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType1) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %f32_1 %relaxed %f32_0 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
                        "be a 32-bit int"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType2) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %f32_1 %f32_0 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
                        "be a 32-bit int"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeUnequalRelease) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %release %f32_0 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: Memory Semantics Release and "
                        "AcquireRelease cannot be used for operand Unequal"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWrongValueType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %u32_0 %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: "
                        "expected Value to be of type Result Type"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWrongComparatorType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %u32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: "
                        "expected Comparator to be of type Result Type"));
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWeakSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicCompareExchangeWeakWrongResultType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchangeWeak %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchangeWeak: "
                        "expected Result Type to be int scalar type"));
}

TEST_F(ValidateAtomics, AtomicArithmeticsSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release
%val2 = OpAtomicIDecrement %u32 %u32_var %device %acquire_release
%val3 = OpAtomicIAdd %u32 %u32_var %device %acquire_release %u32_1
%val4 = OpAtomicISub %u32 %u32_var %device %acquire_release %u32_1
%val5 = OpAtomicUMin %u32 %u32_var %device %acquire_release %u32_1
%val6 = OpAtomicUMax %u32 %u32_var %device %acquire_release %u32_1
%val7 = OpAtomicSMin %u32 %u32_var %device %sequentially_consistent %u32_1
%val8 = OpAtomicSMax %u32 %u32_var %device %sequentially_consistent %u32_1
%val9 = OpAtomicAnd %u32 %u32_var %device %sequentially_consistent %u32_1
%val10 = OpAtomicOr %u32 %u32_var %device %sequentially_consistent %u32_1
%val11 = OpAtomicXor %u32 %u32_var %device %sequentially_consistent %u32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicFlagsSuccess) {
  const std::string body = R"(
OpAtomicFlagClear %u32_var %device %release
%val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongResultType) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %u32 %u32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: "
                        "expected Result Type to be bool scalar type"));
}

TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotPointer) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %u32_1 %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: "
                        "expected Pointer to be of type OpTypePointer"));
}

TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotIntPointer) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %f32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFlagTestAndSet: "
                "expected Pointer to point to a value of 32-bit int type"));
}

TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotInt32Pointer) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %u64_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFlagTestAndSet: "
                "expected Pointer to point to a value of 32-bit int type"));
}

TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongScopeType) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %u32_var %u64_1 %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicFlagTestAndSet: expected Memory Scope to be a 32-bit int\n "
          "%40 = OpAtomicFlagTestAndSet %bool %30 %ulong_1 %uint_0_1\n"));
}

TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongMemorySemanticsType) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %u64_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: "
                        "expected Memory Semantics to be a 32-bit int"));
}

TEST_F(ValidateAtomics, AtomicFlagClearAcquire) {
  const std::string body = R"(
OpAtomicFlagClear %u32_var %device %acquire
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics Acquire and AcquireRelease cannot be "
                        "used with AtomicFlagClear"));
}

TEST_F(ValidateAtomics, AtomicFlagClearNotPointer) {
  const std::string body = R"(
OpAtomicFlagClear %u32_1 %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagClear: "
                        "expected Pointer to be of type OpTypePointer"));
}

TEST_F(ValidateAtomics, AtomicFlagClearNotIntPointer) {
  const std::string body = R"(
OpAtomicFlagClear %f32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFlagClear: "
                "expected Pointer to point to a value of 32-bit int type"));
}

TEST_F(ValidateAtomics, AtomicFlagClearNotInt32Pointer) {
  const std::string body = R"(
OpAtomicFlagClear %u64_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFlagClear: "
                "expected Pointer to point to a value of 32-bit int type"));
}

TEST_F(ValidateAtomics, AtomicFlagClearWrongScopeType) {
  const std::string body = R"(
OpAtomicFlagClear %u32_var %u64_1 %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagClear: expected Memory Scope to be a 32-bit "
                        "int\n OpAtomicFlagClear %30 %ulong_1 %uint_0_1\n"));
}

TEST_F(ValidateAtomics, AtomicFlagClearWrongMemorySemanticsType) {
  const std::string body = R"(
OpAtomicFlagClear %u32_var %device %u64_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicFlagClear: expected Memory Semantics to be a 32-bit int"));
}

TEST_F(ValidateAtomics, AtomicIIncrementAcquireAndRelease) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_and_release
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIIncrement: Memory Semantics can have at most "
                        "one of the following bits set: Acquire, Release, "
                        "AcquireRelease or SequentiallyConsistent\n %40 = "
                        "OpAtomicIIncrement %uint %30 %uint_1_0 %uint_6\n"));
}

TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsShader) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsKernel) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIIncrement: Memory Semantics UniformMemory "
                        "requires capability Shader"));
}

// Disabling this test until
// https://github.com/KhronosGroup/glslang/issues/1618 is resolved.
// TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsNoCapability) {
//   const std::string body = R"(
// OpAtomicStore %u32_var %device %relaxed %u32_1
//%val1 = OpAtomicIIncrement %u32 %u32_var %device
//%acquire_release_atomic_counter_workgroup
//)";
//
//   CompileSuccessfully(GenerateKernelCode(body));
//   ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
//   EXPECT_THAT(
//       getDiagnosticString(),
//       HasSubstr("AtomicIIncrement: Memory Semantics AtomicCounterMemory "
//                 "requires capability AtomicStorage\n %40 = OpAtomicIIncrement
//                 "
//                 "%uint %30 %uint_1_0 %uint_1288\n"));
// }

TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsWithCapability) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_atomic_counter_workgroup
)";

  CompileSuccessfully(GenerateKernelCode(body, "OpCapability AtomicStorage\n"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

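// The following tests check that the validator rejects SequentiallyConsistent
// memory semantics under the VulkanKHR memory model, one atomic opcode at a
// time.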
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicLoad)1190 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicLoad) {
1191 const std::string body = R"(
1192 %ld = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
1193 )";
1194
1195 const std::string extra = R"(
1196 OpCapability VulkanMemoryModelKHR
1197 OpExtension "SPV_KHR_vulkan_memory_model"
1198 )";
1199
1200 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1201 SPV_ENV_UNIVERSAL_1_3);
1202 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1203 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1204 EXPECT_THAT(getDiagnosticString(),
1205 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1206 "used with the VulkanKHR memory model."));
1207 }
1208
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicStore)1209 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicStore) {
1210 const std::string body = R"(
1211 OpAtomicStore %u32_var %workgroup %sequentially_consistent %u32_0
1212 )";
1213
1214 const std::string extra = R"(
1215 OpCapability VulkanMemoryModelKHR
1216 OpExtension "SPV_KHR_vulkan_memory_model"
1217 )";
1218
1219 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1220 SPV_ENV_UNIVERSAL_1_3);
1221 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1222 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1223 EXPECT_THAT(getDiagnosticString(),
1224 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1225 "used with the VulkanKHR memory model."));
1226 }
1227
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicExchange)1228 TEST_F(ValidateAtomics,
1229 VulkanMemoryModelBanSequentiallyConsistentAtomicExchange) {
1230 const std::string body = R"(
1231 %ex = OpAtomicExchange %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1232 )";
1233
1234 const std::string extra = R"(
1235 OpCapability VulkanMemoryModelKHR
1236 OpExtension "SPV_KHR_vulkan_memory_model"
1237 )";
1238
1239 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1240 SPV_ENV_UNIVERSAL_1_3);
1241 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1242 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1243 EXPECT_THAT(getDiagnosticString(),
1244 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1245 "used with the VulkanKHR memory model."));
1246 }
1247
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeEqual)1248 TEST_F(ValidateAtomics,
1249 VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeEqual) {
1250 const std::string body = R"(
1251 %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %sequentially_consistent %relaxed %u32_0 %u32_0
1252 )";
1253
1254 const std::string extra = R"(
1255 OpCapability VulkanMemoryModelKHR
1256 OpExtension "SPV_KHR_vulkan_memory_model"
1257 )";
1258
1259 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1260 SPV_ENV_UNIVERSAL_1_3);
1261 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1262 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1263 EXPECT_THAT(getDiagnosticString(),
1264 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1265 "used with the VulkanKHR memory model."));
1266 }
1267
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeUnequal)1268 TEST_F(ValidateAtomics,
1269 VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeUnequal) {
1270 const std::string body = R"(
1271 %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %relaxed %sequentially_consistent %u32_0 %u32_0
1272 )";
1273
1274 const std::string extra = R"(
1275 OpCapability VulkanMemoryModelKHR
1276 OpExtension "SPV_KHR_vulkan_memory_model"
1277 )";
1278
1279 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1280 SPV_ENV_UNIVERSAL_1_3);
1281 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1282 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1283 EXPECT_THAT(getDiagnosticString(),
1284 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1285 "used with the VulkanKHR memory model."));
1286 }
1287
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicIIncrement)1288 TEST_F(ValidateAtomics,
1289 VulkanMemoryModelBanSequentiallyConsistentAtomicIIncrement) {
1290 const std::string body = R"(
1291 %inc = OpAtomicIIncrement %u32 %u32_var %workgroup %sequentially_consistent
1292 )";
1293
1294 const std::string extra = R"(
1295 OpCapability VulkanMemoryModelKHR
1296 OpExtension "SPV_KHR_vulkan_memory_model"
1297 )";
1298
1299 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1300 SPV_ENV_UNIVERSAL_1_3);
1301 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1302 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1303 EXPECT_THAT(getDiagnosticString(),
1304 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1305 "used with the VulkanKHR memory model."));
1306 }
1307
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicIDecrement)1308 TEST_F(ValidateAtomics,
1309 VulkanMemoryModelBanSequentiallyConsistentAtomicIDecrement) {
1310 const std::string body = R"(
1311 %dec = OpAtomicIDecrement %u32 %u32_var %workgroup %sequentially_consistent
1312 )";
1313
1314 const std::string extra = R"(
1315 OpCapability VulkanMemoryModelKHR
1316 OpExtension "SPV_KHR_vulkan_memory_model"
1317 )";
1318
1319 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1320 SPV_ENV_UNIVERSAL_1_3);
1321 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1322 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1323 EXPECT_THAT(getDiagnosticString(),
1324 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1325 "used with the VulkanKHR memory model."));
1326 }
1327
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicIAdd)1328 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicIAdd) {
1329 const std::string body = R"(
1330 %add = OpAtomicIAdd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1331 )";
1332
1333 const std::string extra = R"(
1334 OpCapability VulkanMemoryModelKHR
1335 OpExtension "SPV_KHR_vulkan_memory_model"
1336 )";
1337
1338 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1339 SPV_ENV_UNIVERSAL_1_3);
1340 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1341 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1342 EXPECT_THAT(getDiagnosticString(),
1343 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1344 "used with the VulkanKHR memory model."));
1345 }
1346
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicISub)1347 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicISub) {
1348 const std::string body = R"(
1349 %sub = OpAtomicISub %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1350 )";
1351
1352 const std::string extra = R"(
1353 OpCapability VulkanMemoryModelKHR
1354 OpExtension "SPV_KHR_vulkan_memory_model"
1355 )";
1356
1357 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1358 SPV_ENV_UNIVERSAL_1_3);
1359 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1360 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1361 EXPECT_THAT(getDiagnosticString(),
1362 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1363 "used with the VulkanKHR memory model."));
1364 }
1365
TEST_F(ValidateAtomics,VulkanMemoryModelBanSequentiallyConsistentAtomicSMin)1366 TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMin) {
1367 const std::string body = R"(
1368 %min = OpAtomicSMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
1369 )";
1370
1371 const std::string extra = R"(
1372 OpCapability VulkanMemoryModelKHR
1373 OpExtension "SPV_KHR_vulkan_memory_model"
1374 )";
1375
1376 CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
1377 SPV_ENV_UNIVERSAL_1_3);
1378 EXPECT_EQ(SPV_ERROR_INVALID_DATA,
1379 ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
1380 EXPECT_THAT(getDiagnosticString(),
1381 HasSubstr("SequentiallyConsistent memory semantics cannot be "
1382 "used with the VulkanKHR memory model."));
1383 }

TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMin) {
  const std::string body = R"(
%min = OpAtomicUMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}

TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMax) {
  const std::string body = R"(
%max = OpAtomicSMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}

TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMax) {
  const std::string body = R"(
%max = OpAtomicUMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}

TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicAnd) {
  const std::string body = R"(
%and = OpAtomicAnd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}

TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicOr) {
  const std::string body = R"(
%or = OpAtomicOr %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}

TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicXor) {
  const std::string body = R"(
%xor = OpAtomicXor %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";

  const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("SequentiallyConsistent memory semantics cannot be "
                        "used with the VulkanKHR memory model."));
}

TEST_F(ValidateAtomics, OutputMemoryKHRRequiresVulkanMemoryModelKHR) {
  const std::string text = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
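; 4100 = OutputMemoryKHR (0x1000) | Release (0x4)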
%semantics = OpConstant %3 4100
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: Memory Semantics OutputMemoryKHR "
                        "requires capability VulkanMemoryModelKHR"));
}

TEST_F(ValidateAtomics, MakeAvailableKHRRequiresVulkanMemoryModelKHR) {
  const std::string text = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
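; 8196 = MakeAvailableKHR (0x2000) | Release (0x4)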
%semantics = OpConstant %3 8196
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: Memory Semantics MakeAvailableKHR "
                        "requires capability VulkanMemoryModelKHR"));
}

TEST_F(ValidateAtomics, MakeVisibleKHRRequiresVulkanMemoryModelKHR) {
  const std::string text = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
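; 16386 = MakeVisibleKHR (0x4000) | Acquire (0x2)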
%semantics = OpConstant %3 16386
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: Memory Semantics MakeVisibleKHR requires "
                        "capability VulkanMemoryModelKHR"));
}

TEST_F(ValidateAtomics, MakeAvailableKHRRequiresReleaseSemantics) {
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
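; 8448 = MakeAvailableKHR (0x2000) | WorkgroupMemory (0x100), without Release
; or AcquireRelease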
%semantics = OpConstant %3 8448
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: MakeAvailableKHR Memory Semantics also requires "
                "either Release or AcquireRelease Memory Semantics"));
}

TEST_F(ValidateAtomics, MakeVisibleKHRRequiresAcquireSemantics) {
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
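; 16640 = MakeVisibleKHR (0x4000) | WorkgroupMemory (0x100), without Acquire
; or AcquireRelease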
%semantics = OpConstant %3 16640
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicLoad: MakeVisibleKHR Memory Semantics also requires "
                "either Acquire or AcquireRelease Memory Semantics"));
}

TEST_F(ValidateAtomics, MakeAvailableKHRRequiresStorageSemantics) {
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
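; 8196 = MakeAvailableKHR (0x2000) | Release (0x4), with no storage-class bits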
%semantics = OpConstant %3 8196
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicStore: expected Memory Semantics to include a storage class"));
}

TEST_F(ValidateAtomics, MakeVisibleKHRRequiresStorageSemantics) {
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
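; 16386 = MakeVisibleKHR (0x4000) | Acquire (0x2), with no storage-class bits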
%semantics = OpConstant %3 16386
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicLoad: expected Memory Semantics to include a storage class"));
}

TEST_F(ValidateAtomics, VulkanMemoryModelAllowsQueueFamilyKHR) {
  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
)";

  const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
                      SPV_ENV_VULKAN_1_1);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_1));
}

TEST_F(ValidateAtomics, NonVulkanMemoryModelDisallowsQueueFamilyKHR) {
  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_1);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_1));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicAnd: Memory Scope QueueFamilyKHR requires "
                        "capability VulkanMemoryModelKHR\n %42 = OpAtomicAnd "
                        "%uint %29 %uint_5 %uint_0_1 %uint_1\n"));
}

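// With the Shader capability, the Memory Semantics operand must come from an
// OpConstant; an OpSpecConstant id is rejected.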
TEST_F(ValidateAtomics, SemanticsSpecConstantShader) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%workgroup = OpConstant %int 2
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %workgroup %spec_const
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics ids must be OpConstant when Shader "
                        "capability is present"));
}

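// Under the Kernel capability, the same OpSpecConstant semantics operand is
// accepted.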
TEST_F(ValidateAtomics, SemanticsSpecConstantKernel) {
  const std::string spirv = R"(
OpCapability Kernel
OpCapability Linkage
OpMemoryModel Logical OpenCL
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%workgroup = OpConstant %int 2
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %workgroup %spec_const
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}

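// The same OpConstant-vs-OpSpecConstant rule applies to Scope operands under
// the Shader capability, while Kernel accepts the spec constant.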
TEST_F(ValidateAtomics, ScopeSpecConstantShader) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%relaxed = OpConstant %int 0
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %spec_const %relaxed
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "Scope ids must be OpConstant when Shader capability is present"));
}

TEST_F(ValidateAtomics, ScopeSpecConstantKernel) {
  const std::string spirv = R"(
OpCapability Kernel
OpCapability Linkage
OpMemoryModel Logical OpenCL
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%relaxed = OpConstant %int 0
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %spec_const %relaxed
OpReturn
OpFunctionEnd
)";

  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}

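// Under the VulkanKHR memory model, Device scope additionally requires the
// VulkanMemoryModelDeviceScopeKHR capability.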
TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeBad) {
  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
)";

  const std::string extra = R"(OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Use of device scope with VulkanKHR memory model requires the "
                "VulkanMemoryModelDeviceScopeKHR capability"));
}

TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeGood) {
  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
)";

  const std::string extra = R"(OpCapability VulkanMemoryModelKHR
OpCapability VulkanMemoryModelDeviceScopeKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";

  CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
}

}  // namespace
}  // namespace val
}  // namespace spvtools