// Copyright 2018 The Amber Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "src/vkscript/parser.h"

#include <algorithm>
#include <cassert>
#include <limits>
#include <tuple>
#include <utility>
#include <vector>

#include "src/make_unique.h"
#include "src/shader.h"
#include "src/type_parser.h"
#include "src/vkscript/command_parser.h"

namespace amber {
namespace vkscript {
namespace {

const uint32_t kDefaultFrameBufferSize = 250;
const char kDefaultPipelineName[] = "vk_pipeline";

}  // namespace

Parser::Parser() : amber::Parser(nullptr) {}
Parser::Parser(Delegate* delegate) : amber::Parser(delegate) {}

Parser::~Parser() = default;

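// Builds an error string prefixed with the tokenizer's current line number.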
std::string Parser::make_error(const Tokenizer& tokenizer,
                               const std::string& err) {
  return std::to_string(tokenizer.GetCurrentLine()) + ": " + err;
}

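// Splits the input into sections, builds the implicit default pipeline and
// then processes each section in order.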
Result Parser::Parse(const std::string& input) {
  SectionParser section_parser;
  Result r = section_parser.Parse(input);
  if (!r.IsSuccess())
    return r;

  r = GenerateDefaultPipeline(section_parser);
  if (!r.IsSuccess())
    return r;

  for (const auto& section : section_parser.Sections()) {
    r = ProcessSection(section);
    if (!r.IsSuccess())
      return r;
  }

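  // Unless disabled for testing, have each pipeline validate its own state.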
  if (!skip_validation_for_test_) {
    for (const auto& pipeline : script_->GetPipelines()) {
      r = pipeline->Validate();
      if (!r.IsSuccess())
        return r;
    }
  }

  return {};
}

Result Parser::GenerateDefaultPipeline(const SectionParser& section_parser) {
  // Generate a pipeline for VkScript.
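  // Assume a compute pipeline unless a non-compute shader section is present.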
  PipelineType pipeline_type = PipelineType::kCompute;
  for (const auto& section : section_parser.Sections()) {
    if (!SectionParser::HasShader(section.section_type))
      continue;

    if (section.shader_type != kShaderTypeCompute) {
      pipeline_type = PipelineType::kGraphics;
      break;
    }
  }

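  // The single default pipeline gets a fixed name and a square framebuffer;
  // an "fbsize" requirement may later override the dimensions.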
  auto new_pipeline = MakeUnique<Pipeline>(pipeline_type);
  auto* pipeline = new_pipeline.get();
  pipeline->SetName(kDefaultPipelineName);
  pipeline->SetFramebufferWidth(kDefaultFrameBufferSize);
  pipeline->SetFramebufferHeight(kDefaultFrameBufferSize);

  Result r = script_->AddPipeline(std::move(new_pipeline));
  if (!r.IsSuccess())
    return r;

  // Generate and add a framebuffer
  auto color_buf = pipeline->GenerateDefaultColorAttachmentBuffer();
  r = pipeline->AddColorAttachment(color_buf.get(), 0, 0);
  if (!r.IsSuccess())
    return r;

  r = script_->AddBuffer(std::move(color_buf));
  if (!r.IsSuccess())
    return r;

  return {};
}

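// Dispatches a parsed section to the handler for its node type.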
Result Parser::ProcessSection(const SectionParser::Section& section) {
  // Should never get here, but skip it anyway.
  if (section.section_type == NodeType::kComment)
    return {};

  if (SectionParser::HasShader(section.section_type))
    return ProcessShaderBlock(section);
  if (section.section_type == NodeType::kRequire)
    return ProcessRequireBlock(section);
  if (section.section_type == NodeType::kIndices)
    return ProcessIndicesBlock(section);
  if (section.section_type == NodeType::kVertexData)
    return ProcessVertexDataBlock(section);
  if (section.section_type == NodeType::kTest)
    return ProcessTestBlock(section);

  return Result("Unknown node type.");
}

Result Parser::ProcessShaderBlock(const SectionParser::Section& section) {
  assert(SectionParser::HasShader(section.section_type));

  auto shader = MakeUnique<Shader>(section.shader_type);
  // Generate a unique name for the shader.
  shader->SetName("vk_shader_" + std::to_string(script_->GetShaders().size()));
  shader->SetFormat(section.format);
  shader->SetData(section.contents);

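  // Attach the shader to the default pipeline, then hand ownership of the
  // shader to the script.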
  Result r = script_->GetPipeline(kDefaultPipelineName)
                 ->AddShader(shader.get(), shader->GetType());
  if (!r.IsSuccess())
    return r;

  r = script_->AddShader(std::move(shader));
  if (!r.IsSuccess())
    return r;

  return {};
}

Result Parser::ProcessRequireBlock(const SectionParser::Section& section) {
  Tokenizer tokenizer(section.contents);
  tokenizer.SetCurrentLine(section.starting_line_number + 1);

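  // Each requirement names a device feature, an extension, or one of the
  // special keywords handled below (color buffer format, depthstencil,
  // fence_timeout, fbsize).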
  for (auto token = tokenizer.NextToken(); !token->IsEOS();
       token = tokenizer.NextToken()) {
    if (token->IsEOL())
      continue;
    if (!token->IsIdentifier()) {
      return Result(make_error(
          tokenizer,
          "Invalid token in requirements block: " + token->ToOriginalString()));
    }

    std::string str = token->AsString();
    if (script_->IsKnownFeature(str)) {
      script_->AddRequiredFeature(str);
    } else if (str == Pipeline::kGeneratedColorBuffer) {
      token = tokenizer.NextToken();
      if (!token->IsIdentifier())
        return Result(make_error(tokenizer, "Missing framebuffer format"));

      TypeParser type_parser;
      auto type = type_parser.Parse(token->AsString());
      if (type == nullptr) {
        return Result(
            make_error(tokenizer, "Failed to parse framebuffer format: " +
                                      token->ToOriginalString()));
      }

      auto fmt = MakeUnique<Format>(type.get());
      script_->GetPipeline(kDefaultPipelineName)
          ->GetColorAttachments()[0]
          .buffer->SetFormat(fmt.get());
      script_->RegisterFormat(std::move(fmt));
      script_->RegisterType(std::move(type));

    } else if (str == "depthstencil") {
      token = tokenizer.NextToken();
      if (!token->IsIdentifier())
        return Result(make_error(tokenizer, "Missing depthStencil format"));

      TypeParser type_parser;
      auto type = type_parser.Parse(token->AsString());
      if (type == nullptr) {
        return Result(
            make_error(tokenizer, "Failed to parse depthstencil format: " +
                                      token->ToOriginalString()));
      }

      auto* pipeline = script_->GetPipeline(kDefaultPipelineName);
      if (pipeline->GetDepthStencilBuffer().buffer != nullptr)
        return Result("Only one depthstencil command allowed");

      auto fmt = MakeUnique<Format>(type.get());
      // Generate and add a depth buffer
      auto depth_buf = pipeline->GenerateDefaultDepthStencilAttachmentBuffer();
      depth_buf->SetFormat(fmt.get());
      script_->RegisterFormat(std::move(fmt));
      script_->RegisterType(std::move(type));

      Result r = pipeline->SetDepthStencilBuffer(depth_buf.get());
      if (!r.IsSuccess())
        return r;

      r = script_->AddBuffer(std::move(depth_buf));
      if (!r.IsSuccess())
        return r;

    } else if (str == "fence_timeout") {
      token = tokenizer.NextToken();
      if (!token->IsInteger())
        return Result(make_error(tokenizer, "Missing fence_timeout value"));

      script_->GetEngineData().fence_timeout_ms = token->AsUint32();

    } else if (str == "fbsize") {
      auto* pipeline = script_->GetPipeline(kDefaultPipelineName);

      token = tokenizer.NextToken();
      if (token->IsEOL() || token->IsEOS()) {
        return Result(make_error(
            tokenizer, "Missing width and height for fbsize command"));
      }
      if (!token->IsInteger()) {
        return Result(
            make_error(tokenizer, "Invalid width for fbsize command"));
      }

      pipeline->SetFramebufferWidth(token->AsUint32());

      token = tokenizer.NextToken();
      if (token->IsEOL() || token->IsEOS()) {
        return Result(
            make_error(tokenizer, "Missing height for fbsize command"));
      }
      if (!token->IsInteger()) {
        return Result(
            make_error(tokenizer, "Invalid height for fbsize command"));
      }

      pipeline->SetFramebufferHeight(token->AsUint32());

    } else {
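      // Anything else made up only of letters, digits and underscores is
      // treated as a required extension name.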
      auto it = std::find_if(str.begin(), str.end(),
                             [](char c) { return !(isalnum(c) || c == '_'); });
      if (it != str.end()) {
        return Result(
            make_error(tokenizer, "Unknown feature or extension: " + str));
      }

      script_->AddRequiredExtension(str);
    }

    token = tokenizer.NextToken();
    if (!token->IsEOS() && !token->IsEOL()) {
      return Result(make_error(
          tokenizer, "Failed to parse requirements block: invalid token: " +
                         token->ToOriginalString()));
    }
  }
  return {};
}

Result Parser::ProcessIndicesBlock(const SectionParser::Section& section) {
  std::vector<Value> indices;

  Tokenizer tokenizer(section.contents);
  tokenizer.SetCurrentLine(section.starting_line_number);
  for (auto token = tokenizer.NextToken(); !token->IsEOS();
       token = tokenizer.NextToken()) {
    if (token->IsEOL())
      continue;

    if (!token->IsInteger())
      return Result(make_error(tokenizer, "Invalid value in indices block: " +
                                              token->ToOriginalString()));
    if (token->AsUint64() >
        static_cast<uint64_t>(std::numeric_limits<uint16_t>::max())) {
      return Result(make_error(tokenizer, "Value too large in indices block: " +
                                              token->ToOriginalString()));
    }

    indices.push_back(Value());
    indices.back().SetIntValue(token->AsUint16());
  }

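  // Wrap the parsed indices in an R32_UINT buffer and make it the index
  // buffer of the default pipeline.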
  if (!indices.empty()) {
    TypeParser parser;
    auto type = parser.Parse("R32_UINT");
    auto fmt = MakeUnique<Format>(type.get());
    auto b = MakeUnique<Buffer>();
    auto* buf = b.get();
    b->SetName("indices");
    b->SetFormat(fmt.get());
    b->SetData(std::move(indices));
    script_->RegisterFormat(std::move(fmt));
    script_->RegisterType(std::move(type));

    Result r = script_->AddBuffer(std::move(b));
    if (!r.IsSuccess())
      return r;

    script_->GetPipeline(kDefaultPipelineName)->SetIndexBuffer(buf);
  }

  return {};
}

Result Parser::ProcessVertexDataBlock(const SectionParser::Section& section) {
  Tokenizer tokenizer(section.contents);
  tokenizer.SetCurrentLine(section.starting_line_number);

  // Skip blank and comment lines
  auto token = tokenizer.NextToken();
  while (token->IsEOL())
    token = tokenizer.NextToken();

  // Skip empty vertex data blocks
  if (token->IsEOS())
    return {};

  // Process the header line.
  struct Header {
    uint8_t location;
    Format* format;
  };
  std::vector<Header> headers;
  while (!token->IsEOL() && !token->IsEOS()) {
    // Because of the way the tokenizer works we'll see a number then a
    // string. The string will start with a slash, which we have to remove.
    if (!token->IsInteger()) {
      return Result(
          make_error(tokenizer, "Unable to process vertex data header: " +
                                    token->ToOriginalString()));
    }

    uint8_t loc = token->AsUint8();

    token = tokenizer.NextToken();
    if (!token->IsIdentifier()) {
      return Result(
          make_error(tokenizer, "Unable to process vertex data header: " +
                                    token->ToOriginalString()));
    }

    std::string fmt_name = token->AsString();
    if (fmt_name.size() < 2)
      return Result(make_error(tokenizer, "Vertex data format too short: " +
                                              token->ToOriginalString()));

    TypeParser parser;
    auto type = parser.Parse(fmt_name.substr(1, fmt_name.length()));
    if (!type) {
      return Result(
          make_error(tokenizer, "Invalid format in vertex data header: " +
                                    fmt_name.substr(1, fmt_name.length())));
    }

    auto fmt = MakeUnique<Format>(type.get());
    headers.push_back({loc, fmt.get()});
    script_->RegisterFormat(std::move(fmt));
    script_->RegisterType(std::move(type));

    token = tokenizer.NextToken();
  }

  // Create a number of vectors equal to the number of headers.
  std::vector<std::vector<Value>> values;
  values.resize(headers.size());

  // Process data lines
  for (; !token->IsEOS(); token = tokenizer.NextToken()) {
    if (token->IsEOL())
      continue;

    for (size_t j = 0; j < headers.size(); ++j) {
      const auto& header = headers[j];
      auto& value_data = values[j];

      auto* type = header.format->GetType();
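      // Packed list formats take a single hex value per cell.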
      if (type->IsList() && type->AsList()->IsPacked()) {
        if (!token->IsHex()) {
          return Result(
              make_error(tokenizer, "Invalid packed value in Vertex Data: " +
                                        token->ToOriginalString()));
        }

        Value v;
        v.SetIntValue(token->AsHex());
        value_data.push_back(v);
      } else {
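        // Unpacked formats read one value for each non-padding segment.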
        auto& segs = header.format->GetSegments();
        for (const auto& seg : segs) {
          if (seg.IsPadding())
            continue;

          if (token->IsEOS() || token->IsEOL()) {
            return Result(make_error(tokenizer,
                                     "Too few cells in given vertex data row"));
          }

          Value v;
          if (seg.GetFormatMode() == FormatMode::kUFloat ||
              seg.GetFormatMode() == FormatMode::kSFloat) {
            Result r = token->ConvertToDouble();
            if (!r.IsSuccess())
              return r;

            v.SetDoubleValue(token->AsDouble());
          } else if (token->IsInteger()) {
            v.SetIntValue(token->AsUint64());
          } else {
            return Result(make_error(tokenizer, "Invalid vertex data value: " +
                                                    token->ToOriginalString()));
          }

          value_data.push_back(v);
          token = tokenizer.NextToken();
        }
      }
    }
  }

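  // Create one vertex buffer per header column and bind it to the default
  // pipeline at the location given in the header.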
  auto* pipeline = script_->GetPipeline(kDefaultPipelineName);
  for (size_t i = 0; i < headers.size(); ++i) {
    auto buffer = MakeUnique<Buffer>();
    auto* buf = buffer.get();
    buffer->SetName("Vertices" + std::to_string(i));
    buffer->SetFormat(headers[i].format);
    Result r = buffer->SetData(std::move(values[i]));
    if (!r.IsSuccess())
      return r;

    script_->AddBuffer(std::move(buffer));

    pipeline->AddVertexBuffer(buf, headers[i].location, InputRate::kVertex,
                              buf->GetFormat(), 0,
                              buf->GetFormat()->SizeInBytes());
  }

  return {};
}

Result Parser::ProcessTestBlock(const SectionParser::Section& section) {
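  // The test commands are handled by CommandParser; the parsed command list
  // becomes the script's command stream.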
  auto* pipeline = script_->GetPipeline(kDefaultPipelineName);
  CommandParser cp(script_.get(), pipeline, section.starting_line_number + 1,
                   section.contents);
  Result r = cp.Parse();
  if (!r.IsSuccess())
    return r;

  script_->SetCommands(cp.TakeCommands());

  return {};
}

}  // namespace vkscript
}  // namespace amber