/*
 * Copyright 2015 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "GrVkUtil.h"

#include "vk/GrVkGpu.h"
#include "SkSLCompiler.h"

GrPixelConfigToVkFormat(GrPixelConfig config,VkFormat * format)13 bool GrPixelConfigToVkFormat(GrPixelConfig config, VkFormat* format) {
14 VkFormat dontCare;
15 if (!format) {
16 format = &dontCare;
17 }
18
19 switch (config) {
20 case kUnknown_GrPixelConfig:
21 return false;
22 case kRGBA_8888_GrPixelConfig:
23 *format = VK_FORMAT_R8G8B8A8_UNORM;
24 return true;
25 case kRGB_888_GrPixelConfig:
26 *format = VK_FORMAT_R8G8B8_UNORM;
27 return true;
28 case kRGB_888X_GrPixelConfig:
29 *format = VK_FORMAT_R8G8B8A8_UNORM;
30 return true;
31 case kRG_88_GrPixelConfig:
32 *format = VK_FORMAT_R8G8_UNORM;
33 return true;
34 case kBGRA_8888_GrPixelConfig:
35 *format = VK_FORMAT_B8G8R8A8_UNORM;
36 return true;
37 case kSRGBA_8888_GrPixelConfig:
38 *format = VK_FORMAT_R8G8B8A8_SRGB;
39 return true;
40 case kSBGRA_8888_GrPixelConfig:
41 *format = VK_FORMAT_B8G8R8A8_SRGB;
42 return true;
43 case kRGBA_1010102_GrPixelConfig:
44 *format = VK_FORMAT_A2B10G10R10_UNORM_PACK32;
45 return true;
46 case kRGB_565_GrPixelConfig:
47 *format = VK_FORMAT_R5G6B5_UNORM_PACK16;
48 return true;
49 case kRGBA_4444_GrPixelConfig:
50 // R4G4B4A4 is not required to be supported so we actually
51 // store the data is if it was B4G4R4A4 and swizzle in shaders
52 *format = VK_FORMAT_B4G4R4A4_UNORM_PACK16;
53 return true;
54 case kAlpha_8_GrPixelConfig: // fall through
55 case kAlpha_8_as_Red_GrPixelConfig:
56 *format = VK_FORMAT_R8_UNORM;
57 return true;
58 case kAlpha_8_as_Alpha_GrPixelConfig:
59 return false;
60 case kGray_8_GrPixelConfig:
61 case kGray_8_as_Red_GrPixelConfig:
62 *format = VK_FORMAT_R8_UNORM;
63 return true;
64 case kGray_8_as_Lum_GrPixelConfig:
65 return false;
66 case kRGBA_float_GrPixelConfig:
67 *format = VK_FORMAT_R32G32B32A32_SFLOAT;
68 return true;
69 case kRG_float_GrPixelConfig:
70 *format = VK_FORMAT_R32G32_SFLOAT;
71 return true;
72 case kRGBA_half_GrPixelConfig:
73 *format = VK_FORMAT_R16G16B16A16_SFLOAT;
74 return true;
75 case kRGBA_half_Clamped_GrPixelConfig:
76 *format = VK_FORMAT_R16G16B16A16_SFLOAT;
77 return true;
78 case kRGB_ETC1_GrPixelConfig:
79 // converting to ETC2 which is a superset of ETC1
80 *format = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
81 return true;
82 case kAlpha_half_GrPixelConfig: // fall through
83 case kAlpha_half_as_Red_GrPixelConfig:
84 *format = VK_FORMAT_R16_SFLOAT;
85 return true;
86 }
87 SK_ABORT("Unexpected config");
88 return false;
89 }
90
#ifdef SK_DEBUG
GrVkFormatPixelConfigPairIsValid(VkFormat format,GrPixelConfig config)92 bool GrVkFormatPixelConfigPairIsValid(VkFormat format, GrPixelConfig config) {
93 switch (format) {
94 case VK_FORMAT_R8G8B8A8_UNORM:
95 return kRGBA_8888_GrPixelConfig == config ||
96 kRGB_888X_GrPixelConfig == config;
97 case VK_FORMAT_B8G8R8A8_UNORM:
98 return kBGRA_8888_GrPixelConfig == config;
99 case VK_FORMAT_R8G8B8A8_SRGB:
100 return kSRGBA_8888_GrPixelConfig == config;
101 case VK_FORMAT_B8G8R8A8_SRGB:
102 return kSBGRA_8888_GrPixelConfig == config;
103 case VK_FORMAT_R8G8B8_UNORM:
104 return kRGB_888_GrPixelConfig == config;
105 case VK_FORMAT_R8G8_UNORM:
106 return kRG_88_GrPixelConfig == config;
107 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
108 return kRGBA_1010102_GrPixelConfig == config;
109 case VK_FORMAT_R5G6B5_UNORM_PACK16:
110 return kRGB_565_GrPixelConfig == config;
111 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
112 // R4G4B4A4 is not required to be supported so we actually
113 // store RGBA_4444 data as B4G4R4A4.
114 return kRGBA_4444_GrPixelConfig == config;
115 case VK_FORMAT_R8_UNORM:
116 return kAlpha_8_GrPixelConfig == config ||
117 kAlpha_8_as_Red_GrPixelConfig == config ||
118 kGray_8_GrPixelConfig == config ||
119 kGray_8_as_Red_GrPixelConfig == config;
120 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
121 return kRGB_ETC1_GrPixelConfig == config;
122 case VK_FORMAT_R32G32B32A32_SFLOAT:
123 return kRGBA_float_GrPixelConfig == config;
124 case VK_FORMAT_R32G32_SFLOAT:
125 return kRG_float_GrPixelConfig == config;
126 case VK_FORMAT_R16G16B16A16_SFLOAT:
127 return kRGBA_half_GrPixelConfig == config ||
128 kRGBA_half_Clamped_GrPixelConfig == config;
129 case VK_FORMAT_R16_SFLOAT:
130 return kAlpha_half_GrPixelConfig == config ||
131 kAlpha_half_as_Red_GrPixelConfig == config;
132 default:
133 return false;
134 }
135 }
#endif
137
GrVkFormatIsSupported(VkFormat format)138 bool GrVkFormatIsSupported(VkFormat format) {
139 switch (format) {
140 case VK_FORMAT_R8G8B8A8_UNORM:
141 case VK_FORMAT_B8G8R8A8_UNORM:
142 case VK_FORMAT_R8G8B8A8_SRGB:
143 case VK_FORMAT_B8G8R8A8_SRGB:
144 case VK_FORMAT_R8G8B8A8_SINT:
145 case VK_FORMAT_R8G8B8_UNORM:
146 case VK_FORMAT_R8G8_UNORM:
147 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
148 case VK_FORMAT_R5G6B5_UNORM_PACK16:
149 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
150 case VK_FORMAT_R8_UNORM:
151 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
152 case VK_FORMAT_R32G32B32A32_SFLOAT:
153 case VK_FORMAT_R32G32_SFLOAT:
154 case VK_FORMAT_R16G16B16A16_SFLOAT:
155 case VK_FORMAT_R16_SFLOAT:
156 return true;
157 default:
158 return false;
159 }
160 }
161
GrSampleCountToVkSampleCount(uint32_t samples,VkSampleCountFlagBits * vkSamples)162 bool GrSampleCountToVkSampleCount(uint32_t samples, VkSampleCountFlagBits* vkSamples) {
163 SkASSERT(samples >= 1);
164 switch (samples) {
165 case 1:
166 *vkSamples = VK_SAMPLE_COUNT_1_BIT;
167 return true;
168 case 2:
169 *vkSamples = VK_SAMPLE_COUNT_2_BIT;
170 return true;
171 case 4:
172 *vkSamples = VK_SAMPLE_COUNT_4_BIT;
173 return true;
174 case 8:
175 *vkSamples = VK_SAMPLE_COUNT_8_BIT;
176 return true;
177 case 16:
178 *vkSamples = VK_SAMPLE_COUNT_16_BIT;
179 return true;
180 case 32:
181 *vkSamples = VK_SAMPLE_COUNT_32_BIT;
182 return true;
183 case 64:
184 *vkSamples = VK_SAMPLE_COUNT_64_BIT;
185 return true;
186 default:
187 return false;
188 }
189 }
190
vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage)191 SkSL::Program::Kind vk_shader_stage_to_skiasl_kind(VkShaderStageFlagBits stage) {
192 if (VK_SHADER_STAGE_VERTEX_BIT == stage) {
193 return SkSL::Program::kVertex_Kind;
194 }
195 if (VK_SHADER_STAGE_GEOMETRY_BIT == stage) {
196 return SkSL::Program::kGeometry_Kind;
197 }
198 SkASSERT(VK_SHADER_STAGE_FRAGMENT_BIT == stage);
199 return SkSL::Program::kFragment_Kind;
200 }
201
GrCompileVkShaderModule(const GrVkGpu * gpu,const char * shaderString,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo,const SkSL::Program::Settings & settings,SkSL::String * outSPIRV,SkSL::Program::Inputs * outInputs)202 bool GrCompileVkShaderModule(const GrVkGpu* gpu,
203 const char* shaderString,
204 VkShaderStageFlagBits stage,
205 VkShaderModule* shaderModule,
206 VkPipelineShaderStageCreateInfo* stageInfo,
207 const SkSL::Program::Settings& settings,
208 SkSL::String* outSPIRV,
209 SkSL::Program::Inputs* outInputs) {
210 std::unique_ptr<SkSL::Program> program = gpu->shaderCompiler()->convertProgram(
211 vk_shader_stage_to_skiasl_kind(stage),
212 SkSL::String(shaderString),
213 settings);
214 if (!program) {
215 printf("%s\n", shaderString);
216 SkDebugf("SkSL error:\n%s\n", gpu->shaderCompiler()->errorText().c_str());
217 SkASSERT(false);
218 }
219 *outInputs = program->fInputs;
220 if (!gpu->shaderCompiler()->toSPIRV(*program, outSPIRV)) {
221 SkDebugf("%s\n", gpu->shaderCompiler()->errorText().c_str());
222 return false;
223 }
224
225 return GrInstallVkShaderModule(gpu, *outSPIRV, stage, shaderModule, stageInfo);
226 }
227
GrInstallVkShaderModule(const GrVkGpu * gpu,const SkSL::String & spirv,VkShaderStageFlagBits stage,VkShaderModule * shaderModule,VkPipelineShaderStageCreateInfo * stageInfo)228 bool GrInstallVkShaderModule(const GrVkGpu* gpu,
229 const SkSL::String& spirv,
230 VkShaderStageFlagBits stage,
231 VkShaderModule* shaderModule,
232 VkPipelineShaderStageCreateInfo* stageInfo) {
233 VkShaderModuleCreateInfo moduleCreateInfo;
234 memset(&moduleCreateInfo, 0, sizeof(VkShaderModuleCreateInfo));
235 moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
236 moduleCreateInfo.pNext = nullptr;
237 moduleCreateInfo.flags = 0;
238 moduleCreateInfo.codeSize = spirv.size();
239 moduleCreateInfo.pCode = (const uint32_t*)spirv.c_str();
240
241 VkResult err = GR_VK_CALL(gpu->vkInterface(), CreateShaderModule(gpu->device(),
242 &moduleCreateInfo,
243 nullptr,
244 shaderModule));
245 if (err) {
246 return false;
247 }
248
249 memset(stageInfo, 0, sizeof(VkPipelineShaderStageCreateInfo));
250 stageInfo->sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
251 stageInfo->pNext = nullptr;
252 stageInfo->flags = 0;
253 stageInfo->stage = stage;
254 stageInfo->module = *shaderModule;
255 stageInfo->pName = "main";
256 stageInfo->pSpecializationInfo = nullptr;
257
258 return true;
259 }
260