/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUtil.h"

#include "vk/GrVkGpu.h"
#include "SkSLCompiler.h"

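// Maps a GrPixelConfig to the VkFormat Skia uses to back it. Returns false if the
// config has no Vulkan equivalent. "format" may be nullptr when only the return
// value is of interest.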
bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
    VkFormat dontCare;
    if (!format) {
        format = &dontCare;
    }

    switch (config) {
        case kUnknown_GrPixelConfig:
            return false;
        case kRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_UNORM;
            return true;
        case kBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_UNORM;
            return true;
        case kSRGBA_8888_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_SRGB;
            return true;
        case kSBGRA_8888_GrPixelConfig:
            *format = VK_FORMAT_B8G8R8A8_SRGB;
            return true;
        case kRGBA_8888_sint_GrPixelConfig:
            *format = VK_FORMAT_R8G8B8A8_SINT;
            return true;
        case kRGB_565_GrPixelConfig:
            *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
            return true;
        case kRGBA_4444_GrPixelConfig:
            // R4G4B4A4 is not required to be supported, so we actually store
            // the data as if it were B4G4R4A4 and swizzle it in the shaders.
            *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
            return true;
        case kAlpha_8_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kGray_8_GrPixelConfig:
            *format = VK_FORMAT_R8_UNORM;
            return true;
        case kETC1_GrPixelConfig:
            // converting to ETC2 which is a superset of ETC1
            *format = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
            return true;
        case kRGBA_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32B32A32_SFLOAT;
            return true;
        case kRG_float_GrPixelConfig:
            *format = VK_FORMAT_R32G32_SFLOAT;
            return true;
        case kRGBA_half_GrPixelConfig:
            *format = VK_FORMAT_R16G16B16A16_SFLOAT;
            return true;
        case kAlpha_half_GrPixelConfig:
            *format = VK_FORMAT_R16_SFLOAT;
            return true;
    }
    SkFAIL("Unexpected config");
    return false;
}

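// Inverse mapping: returns the GrPixelConfig Skia stores in a given VkFormat.
// Returns false for formats Skia does not use. "config" may be nullptr.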
bool GrVkFormatToPixelConfig(VkFormat format, GrPixelConfig* config) {
    GrPixelConfig dontCare;
    if (!config) {
        config = &dontCare;
    }

    switch (format) {
        case VK_FORMAT_R8G8B8A8_UNORM:
            *config = kRGBA_8888_GrPixelConfig;
            break;
        case VK_FORMAT_B8G8R8A8_UNORM:
            *config = kBGRA_8888_GrPixelConfig;
            break;
        case VK_FORMAT_R8G8B8A8_SRGB:
            *config = kSRGBA_8888_GrPixelConfig;
            break;
        case VK_FORMAT_B8G8R8A8_SRGB:
            *config = kSBGRA_8888_GrPixelConfig;
            break;
        case VK_FORMAT_R8G8B8A8_SINT:
            *config = kRGBA_8888_sint_GrPixelConfig;
            break;
        case VK_FORMAT_R5G6B5_UNORM_PACK16:
            *config = kRGB_565_GrPixelConfig;
            break;
        case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
            // R4G4B4A4 is not required to be supported, so we actually
            // store RGBA_4444 data as B4G4R4A4.
            *config = kRGBA_4444_GrPixelConfig;
            break;
        case VK_FORMAT_R8_UNORM:
            *config = kAlpha_8_GrPixelConfig;
            break;
        case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
            *config = kETC1_GrPixelConfig;      // this conversion seems a bit sketchy
            break;
        case VK_FORMAT_R32G32B32A32_SFLOAT:
            *config = kRGBA_float_GrPixelConfig;
            break;
        case VK_FORMAT_R32G32_SFLOAT:
            *config = kRG_float_GrPixelConfig;
            break;
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            *config = kRGBA_half_GrPixelConfig;
            break;
        case VK_FORMAT_R16_SFLOAT:
            *config = kAlpha_half_GrPixelConfig;
            break;
        default:
            return false;
    }
    return true;
}

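// Returns true if the format is an sRGB format. If "linearFormat" is non-null it
// receives the UNORM equivalent for sRGB formats, or the original format otherwise.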
bool GrVkFormatIsSRGB(VkFormat format, VkFormat* linearFormat) {
    VkFormat linearFmt = format;
    switch (format) {
        case VK_FORMAT_R8_SRGB:
            linearFmt = VK_FORMAT_R8_UNORM;
            break;
        case VK_FORMAT_R8G8_SRGB:
            linearFmt = VK_FORMAT_R8G8_UNORM;
            break;
        case VK_FORMAT_R8G8B8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8_UNORM;
            break;
        case VK_FORMAT_B8G8R8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8_UNORM;
            break;
        case VK_FORMAT_R8G8B8A8_SRGB:
            linearFmt = VK_FORMAT_R8G8B8A8_UNORM;
            break;
        case VK_FORMAT_B8G8R8A8_SRGB:
            linearFmt = VK_FORMAT_B8G8R8A8_UNORM;
            break;
        case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
            linearFmt = VK_FORMAT_A8B8G8R8_UNORM_PACK32;
            break;
        case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGB_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC2_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC2_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC3_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC3_UNORM_BLOCK;
            break;
        case VK_FORMAT_BC7_SRGB_BLOCK:
            linearFmt = VK_FORMAT_BC7_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
            break;
        case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
            break;
        case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
            linearFmt = VK_FORMAT_ASTC_12x12_UNORM_BLOCK;
            break;
        default:
            break;
    }
    if (linearFormat) {
        *linearFormat = linearFmt;
    }
    return (linearFmt != format);
}

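// Converts a raw sample count to the matching VkSampleCountFlagBits value. Returns
// false for counts Vulkan does not support; 0 is treated as a single sample.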
bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
    switch (samples) {
        case 0: // fall through
        case 1:
            *vkSamples = VK_SAMPLE_COUNT_1_BIT;
            return true;
        case 2:
            *vkSamples = VK_SAMPLE_COUNT_2_BIT;
            return true;
        case 4:
            *vkSamples = VK_SAMPLE_COUNT_4_BIT;
            return true;
        case 8:
            *vkSamples = VK_SAMPLE_COUNT_8_BIT;
            return true;
        case 16:
            *vkSamples = VK_SAMPLE_COUNT_16_BIT;
            return true;
        case 32:
            *vkSamples = VK_SAMPLE_COUNT_32_BIT;
            return true;
        case 64:
            *vkSamples = VK_SAMPLE_COUNT_64_BIT;
            return true;
        default:
            return false;
    }
}

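// Maps a Vulkan shader stage to the SkSL program kind used by the shader compiler.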
SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
    if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
        return SkSL::Program::kVertex_Kind;
    }
    SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
    return SkSL::Program::kFragment_Kind;
}

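// Inverse of the above: maps an SkSL program kind back to its Vulkan shader stage.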
VkShaderStageFlagBits skiasl_kind_to_vk_shader_stage(SkSL::Program::Kind kind) {
    if (SkSL::Program::kVertex_Kind == kind) {
        return VK_SHADER_STAGE_VERTEX_BIT;
    }
    SkASSERT(SkSL::Program::kFragment_Kind == kind);
    return VK_SHADER_STAGE_FRAGMENT_BIT;
}

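// Compiles SkSL source to SPIR-V, creates a VkShaderModule from it, and fills out the
// pipeline shader stage info. Returns false if compilation or module creation fails.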
bool GrCompileVkShaderModule(const GrVkGpu* gpu,
                             const char* shaderString,
                             VkShaderStageFlagBits stage,
                             VkShaderModule* shaderModule,
                             VkPipelineShaderStageCreateInfo* stageInfo,
                             const SkSL::Program::Settings& settings,
                             SkSL::Program::Inputs* outInputs) {
    std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
                                                              vk_shader_stage_to_skiasl_kind(stage),
                                                              SkString(shaderString),
                                                              settings);
    if (!program) {
        SkDebugf("SkSL error:\n%s\n", gpu->shaderCompiler()->errorText().c_str());
        SkASSERT(false);
        // Bail out so we do not dereference a null program in release builds.
        return false;
    }
    *outInputs = program->fInputs;
    SkString code;
    if (!gpu->shaderCompiler()->toSPIRV(*program, &code)) {
        SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str());
        return false;
    }

    VkShaderModuleCreateInfo moduleCreateInfo;
    memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
    moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    moduleCreateInfo.pNext = nullptr;
    moduleCreateInfo.flags = 0;
    moduleCreateInfo.codeSize = code.size();
    moduleCreateInfo.pCode = (const uint32_t*)code.c_str();

    VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
                                                                     &moduleCreateInfo,
                                                                     nullptr,
                                                                     shaderModule));
    if (err) {
        return false;
    }

    memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
    stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    stageInfo->pNext = nullptr;
    stageInfo->flags = 0;
    stageInfo->stage = skiasl_kind_to_vk_shader_stage(program->fKind);
    stageInfo->module = *shaderModule;
    stageInfo->pName = "main";
    stageInfo->pSpecializationInfo = nullptr;

    return true;
}