1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/signature.h"
6 
7 #include "src/bit-vector.h"
8 #include "src/flags.h"
9 #include "src/handles.h"
10 #include "src/zone/zone-containers.h"
11 
12 #include "src/wasm/ast-decoder.h"
13 #include "src/wasm/decoder.h"
14 #include "src/wasm/wasm-module.h"
15 #include "src/wasm/wasm-opcodes.h"
16 
17 #include "src/ostreams.h"
18 
19 #include "src/compiler/wasm-compiler.h"
20 
21 namespace v8 {
22 namespace internal {
23 namespace wasm {
24 
25 #if DEBUG
26 #define TRACE(...)                                    \
27   do {                                                \
28     if (FLAG_trace_wasm_decoder) PrintF(__VA_ARGS__); \
29   } while (false)
30 #else
31 #define TRACE(...)
32 #endif
33 
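// Checks that a prototype (experimental) opcode may be decoded: reports an
// error for asm.js-origin modules, and errors and breaks out of the opcode
// switch unless the corresponding --<flag> is enabled.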
34 #define CHECK_PROTOTYPE_OPCODE(flag)                   \
35   if (module_ && module_->origin == kAsmJsOrigin) {    \
36     error("Opcode not supported for asmjs modules");   \
37   }                                                    \
38   if (!FLAG_##flag) {                                  \
39     error("Invalid opcode (enable with --" #flag ")"); \
40     break;                                             \
41   }
42 // TODO(titzer): this is only for intermediate migration.
43 #define IMPLICIT_FUNCTION_END 1
44 
45 // An SsaEnv environment carries the current local variable renaming
46 // as well as the current effect and control dependency in the TF graph.
47 // It maintains a control state that tracks whether the environment
48 // is reachable, has reached a control end, or has been merged.
49 struct SsaEnv {
50   enum State { kControlEnd, kUnreachable, kReached, kMerged };
51 
52   State state;
53   TFNode* control;
54   TFNode* effect;
55   TFNode** locals;
56 
57   bool go() { return state >= kReached; }
58   void Kill(State new_state = kControlEnd) {
59     state = new_state;
60     locals = nullptr;
61     control = nullptr;
62     effect = nullptr;
63   }
64   void SetNotMerged() {
65     if (state == kMerged) state = kReached;
66   }
67 };
68 
69 // An entry on the value stack.
70 struct Value {
71   const byte* pc;
72   TFNode* node;
73   LocalType type;
74 };
75 
76 struct TryInfo : public ZoneObject {
77   SsaEnv* catch_env;
78   TFNode* exception;
79 
80   explicit TryInfo(SsaEnv* c) : catch_env(c), exception(nullptr) {}
81 };
82 
83 struct MergeValues {
84   uint32_t arity;
85   union {
86     Value* array;
87     Value first;
88   } vals;  // Either multiple values or a single value.
89 
90   Value& first() {
91     DCHECK_GT(arity, 0u);
92     return arity == 1 ? vals.first : vals.array[0];
93   }
94 };
95 
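// Placeholder used to zero-initialize the {vals} union in the Control
// constructors below.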
96 static Value* NO_VALUE = nullptr;
97 
98 enum ControlKind { kControlIf, kControlBlock, kControlLoop, kControlTry };
99 
100 // An entry on the control stack (i.e. if, block, loop).
101 struct Control {
102   const byte* pc;
103   ControlKind kind;
104   int stack_depth;    // stack height at the beginning of the construct.
105   SsaEnv* end_env;    // end environment for the construct.
106   SsaEnv* false_env;  // false environment (only for if).
107   TryInfo* try_info;  // Information used for compiling try statements.
108   int32_t previous_catch;  // The previous Control (on the stack) with a catch.
109 
110   // Values merged into the end of this control construct.
111   MergeValues merge;
112 
113   inline bool is_if() const { return kind == kControlIf; }
114   inline bool is_block() const { return kind == kControlBlock; }
115   inline bool is_loop() const { return kind == kControlLoop; }
116   inline bool is_try() const { return kind == kControlTry; }
117 
118   // Named constructors.
119   static Control Block(const byte* pc, int stack_depth, SsaEnv* end_env,
120                        int32_t previous_catch) {
121     return {pc,      kControlBlock, stack_depth,    end_env,
122             nullptr, nullptr,       previous_catch, {0, {NO_VALUE}}};
123   }
124 
125   static Control If(const byte* pc, int stack_depth, SsaEnv* end_env,
126                     SsaEnv* false_env, int32_t previous_catch) {
127     return {pc,        kControlIf, stack_depth,    end_env,
128             false_env, nullptr,    previous_catch, {0, {NO_VALUE}}};
129   }
130 
131   static Control Loop(const byte* pc, int stack_depth, SsaEnv* end_env,
132                       int32_t previous_catch) {
133     return {pc,      kControlLoop, stack_depth,    end_env,
134             nullptr, nullptr,      previous_catch, {0, {NO_VALUE}}};
135   }
136 
137   static Control Try(const byte* pc, int stack_depth, SsaEnv* end_env,
138                      Zone* zone, SsaEnv* catch_env, int32_t previous_catch) {
139     DCHECK_NOT_NULL(catch_env);
140     TryInfo* try_info = new (zone) TryInfo(catch_env);
141     return {pc,      kControlTry, stack_depth,    end_env,
142             nullptr, try_info,    previous_catch, {0, {NO_VALUE}}};
143   }
144 };
145 
146 // Macros that build nodes only if there is a graph and the current SSA
147 // environment is reachable from start. This avoids problems with malformed
148 // TF graphs when decoding inputs that have unreachable code.
149 #define BUILD(func, ...) \
150   (build() ? CheckForException(builder_->func(__VA_ARGS__)) : nullptr)
151 #define BUILD0(func) (build() ? CheckForException(builder_->func()) : nullptr)
152 
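// Operand for SIMD lane accesses; the lane index is read from the byte
// immediately following the two-byte SIMD opcode.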
153 struct LaneOperand {
154   uint8_t lane;
155   unsigned length;
156 
157   inline LaneOperand(Decoder* decoder, const byte* pc) {
158     lane = decoder->checked_read_u8(pc, 2, "lane");
159     length = 1;
160   }
161 };
162 
163 // Generic Wasm bytecode decoder with utilities for decoding operands,
164 // lengths, etc.
165 class WasmDecoder : public Decoder {
166  public:
167   WasmDecoder(ModuleEnv* module, FunctionSig* sig, const byte* start,
168               const byte* end)
169       : Decoder(start, end),
170         module_(module),
171         sig_(sig),
172         total_locals_(0),
173         local_types_(nullptr) {}
174   ModuleEnv* module_;
175   FunctionSig* sig_;
176   size_t total_locals_;
177   ZoneVector<LocalType>* local_types_;
178 
179   inline bool Validate(const byte* pc, LocalIndexOperand& operand) {
180     if (operand.index < total_locals_) {
181       if (local_types_) {
182         operand.type = local_types_->at(operand.index);
183       } else {
184         operand.type = kAstStmt;
185       }
186       return true;
187     }
188     error(pc, pc + 1, "invalid local index: %u", operand.index);
189     return false;
190   }
191 
192   inline bool Validate(const byte* pc, GlobalIndexOperand& operand) {
193     ModuleEnv* m = module_;
194     if (m && m->module && operand.index < m->module->globals.size()) {
195       operand.global = &m->module->globals[operand.index];
196       operand.type = operand.global->type;
197       return true;
198     }
199     error(pc, pc + 1, "invalid global index: %u", operand.index);
200     return false;
201   }
202 
203   inline bool Complete(const byte* pc, CallFunctionOperand& operand) {
204     ModuleEnv* m = module_;
205     if (m && m->module && operand.index < m->module->functions.size()) {
206       operand.sig = m->module->functions[operand.index].sig;
207       return true;
208     }
209     return false;
210   }
211 
212   inline bool Validate(const byte* pc, CallFunctionOperand& operand) {
213     if (Complete(pc, operand)) {
214       return true;
215     }
216     error(pc, pc + 1, "invalid function index: %u", operand.index);
217     return false;
218   }
219 
220   inline bool Complete(const byte* pc, CallIndirectOperand& operand) {
221     ModuleEnv* m = module_;
222     if (m && m->module && operand.index < m->module->signatures.size()) {
223       operand.sig = m->module->signatures[operand.index];
224       return true;
225     }
226     return false;
227   }
228 
229   inline bool Validate(const byte* pc, CallIndirectOperand& operand) {
230     uint32_t table_index = 0;
231     if (!module_->IsValidTable(table_index)) {
232       error("function table has to exist to execute call_indirect");
233       return false;
234     }
235     if (Complete(pc, operand)) {
236       return true;
237     }
238     error(pc, pc + 1, "invalid signature index: #%u", operand.index);
239     return false;
240   }
241 
242   inline bool Validate(const byte* pc, BreakDepthOperand& operand,
243                        ZoneVector<Control>& control) {
244     if (operand.depth < control.size()) {
245       operand.target = &control[control.size() - operand.depth - 1];
246       return true;
247     }
248     error(pc, pc + 1, "invalid break depth: %u", operand.depth);
249     return false;
250   }
251 
252   bool Validate(const byte* pc, BranchTableOperand& operand,
253                 size_t block_depth) {
254     // TODO(titzer): add extra redundant validation for br_table here?
255     return true;
256   }
257 
258   inline bool Validate(const byte* pc, LaneOperand& operand) {
259     if (operand.lane > 3) {
260       error(pc_, pc_ + 2, "invalid extract lane value");
261       return false;
262     } else {
263       return true;
264     }
265   }
266 
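  // Computes the length of the opcode at {pc}, in bytes, including any
  // immediate operands.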
267   unsigned OpcodeLength(const byte* pc) {
268     switch (static_cast<byte>(*pc)) {
269 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
270       FOREACH_LOAD_MEM_OPCODE(DECLARE_OPCODE_CASE)
271       FOREACH_STORE_MEM_OPCODE(DECLARE_OPCODE_CASE)
272 #undef DECLARE_OPCODE_CASE
273       {
274         MemoryAccessOperand operand(this, pc, UINT32_MAX);
275         return 1 + operand.length;
276       }
277       case kExprBr:
278       case kExprBrIf: {
279         BreakDepthOperand operand(this, pc);
280         return 1 + operand.length;
281       }
282       case kExprSetGlobal:
283       case kExprGetGlobal: {
284         GlobalIndexOperand operand(this, pc);
285         return 1 + operand.length;
286       }
287 
288       case kExprCallFunction: {
289         CallFunctionOperand operand(this, pc);
290         return 1 + operand.length;
291       }
292       case kExprCallIndirect: {
293         CallIndirectOperand operand(this, pc);
294         return 1 + operand.length;
295       }
296 
297       case kExprTry:
298       case kExprIf:  // fall thru
299       case kExprLoop:
300       case kExprBlock: {
301         BlockTypeOperand operand(this, pc);
302         return 1 + operand.length;
303       }
304 
305       case kExprSetLocal:
306       case kExprTeeLocal:
307       case kExprGetLocal:
308       case kExprCatch: {
309         LocalIndexOperand operand(this, pc);
310         return 1 + operand.length;
311       }
312       case kExprBrTable: {
313         BranchTableOperand operand(this, pc);
314         BranchTableIterator iterator(this, operand);
315         return 1 + iterator.length();
316       }
317       case kExprI32Const: {
318         ImmI32Operand operand(this, pc);
319         return 1 + operand.length;
320       }
321       case kExprI64Const: {
322         ImmI64Operand operand(this, pc);
323         return 1 + operand.length;
324       }
325       case kExprGrowMemory:
326       case kExprMemorySize: {
327         MemoryIndexOperand operand(this, pc);
328         return 1 + operand.length;
329       }
330       case kExprI8Const:
331         return 2;
332       case kExprF32Const:
333         return 5;
334       case kExprF64Const:
335         return 9;
336       case kSimdPrefix: {
337         byte simd_index = checked_read_u8(pc, 1, "simd_index");
338         WasmOpcode opcode =
339             static_cast<WasmOpcode>(kSimdPrefix << 8 | simd_index);
340         switch (opcode) {
341 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
342           FOREACH_SIMD_0_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
343 #undef DECLARE_OPCODE_CASE
344           {
345             return 2;
346           }
347 #define DECLARE_OPCODE_CASE(name, opcode, sig) case kExpr##name:
348           FOREACH_SIMD_1_OPERAND_OPCODE(DECLARE_OPCODE_CASE)
349 #undef DECLARE_OPCODE_CASE
350           {
351             return 3;
352           }
353           default:
354             error("invalid SIMD opcode");
355             return 2;
356         }
357       }
358       default:
359         return 1;
360     }
361   }
362 };
363 
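// Sentinel for {current_catch_} indicating that the decoder is not inside
// a try block with a pending catch.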
364 static const int32_t kNullCatch = -1;
365 
366 // The full WASM decoder for bytecode. Both verifies bytecode and generates
367 // a TurboFan IR graph.
368 class WasmFullDecoder : public WasmDecoder {
369  public:
370   WasmFullDecoder(Zone* zone, TFBuilder* builder, const FunctionBody& body)
371       : WasmDecoder(body.module, body.sig, body.start, body.end),
372         zone_(zone),
373         builder_(builder),
374         base_(body.base),
375         local_type_vec_(zone),
376         stack_(zone),
377         control_(zone),
378         last_end_found_(false),
379         current_catch_(kNullCatch) {
380     local_types_ = &local_type_vec_;
381   }
382 
383   bool Decode() {
384     base::ElapsedTimer decode_timer;
385     if (FLAG_trace_wasm_decode_time) {
386       decode_timer.Start();
387     }
388     stack_.clear();
389     control_.clear();
390 
391     if (end_ < pc_) {
392       error("function body end < start");
393       return false;
394     }
395 
396     DecodeLocalDecls();
397     InitSsaEnv();
398     DecodeFunctionBody();
399 
400     if (failed()) return TraceFailed();
401 
402 #if IMPLICIT_FUNCTION_END
403     // With implicit end support (old style), the function block
404     // remains on the stack. Other control blocks are an error.
405     if (control_.size() > 1) {
406       error(pc_, control_.back().pc, "unterminated control structure");
407       return TraceFailed();
408     }
409 
410     // Assume an implicit end to the function body block.
411     if (control_.size() == 1) {
412       Control* c = &control_.back();
413       if (ssa_env_->go()) {
414         FallThruTo(c);
415       }
416 
417       if (c->end_env->go()) {
418         // Push the end values onto the stack.
419         stack_.resize(c->stack_depth);
420         if (c->merge.arity == 1) {
421           stack_.push_back(c->merge.vals.first);
422         } else {
423           for (unsigned i = 0; i < c->merge.arity; i++) {
424             stack_.push_back(c->merge.vals.array[i]);
425           }
426         }
427 
428         TRACE("  @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
429         SetEnv("function:end", c->end_env);
430         DoReturn();
431         TRACE("\n");
432       }
433     }
434 #else
435     if (!control_.empty()) {
436       error(pc_, control_.back().pc, "unterminated control structure");
437       return TraceFailed();
438     }
439 
440     if (!last_end_found_) {
441       error("function body must end with \"end\" opcode.");
442       return false;
443     }
444 #endif
445 
446     if (FLAG_trace_wasm_decode_time) {
447       double ms = decode_timer.Elapsed().InMillisecondsF();
448       PrintF("wasm-decode %s (%0.3f ms)\n\n", ok() ? "ok" : "failed", ms);
449     } else {
450       TRACE("wasm-decode %s\n\n", ok() ? "ok" : "failed");
451     }
452 
453     return true;
454   }
455 
456   bool TraceFailed() {
457     TRACE("wasm-error module+%-6d func+%d: %s\n\n", baserel(error_pc_),
458           startrel(error_pc_), error_msg_.get());
459     return false;
460   }
461 
462   bool DecodeLocalDecls(AstLocalDecls& decls) {
463     DecodeLocalDecls();
464     if (failed()) return false;
465     decls.decls_encoded_size = pc_offset();
466     decls.local_types.reserve(local_type_vec_.size());
467     for (size_t pos = 0; pos < local_type_vec_.size();) {
468       uint32_t count = 0;
469       LocalType type = local_type_vec_[pos];
470       while (pos < local_type_vec_.size() && local_type_vec_[pos] == type) {
471         pos++;
472         count++;
473       }
474       decls.local_types.push_back(std::pair<LocalType, uint32_t>(type, count));
475     }
476     decls.total_local_count = static_cast<uint32_t>(local_type_vec_.size());
477     return true;
478   }
479 
480   BitVector* AnalyzeLoopAssignmentForTesting(const byte* pc,
481                                              size_t num_locals) {
482     total_locals_ = num_locals;
483     local_type_vec_.reserve(num_locals);
484     if (num_locals > local_type_vec_.size()) {
485       local_type_vec_.insert(local_type_vec_.end(),
486                              num_locals - local_type_vec_.size(), kAstI32);
487     }
488     return AnalyzeLoopAssignment(pc);
489   }
490 
491  private:
492   static const size_t kErrorMsgSize = 128;
493 
494   Zone* zone_;
495   TFBuilder* builder_;
496   const byte* base_;
497 
498   SsaEnv* ssa_env_;
499 
500   ZoneVector<LocalType> local_type_vec_;  // types of local variables.
501   ZoneVector<Value> stack_;               // stack of values.
502   ZoneVector<Control> control_;           // stack of blocks, loops, and ifs.
503   bool last_end_found_;
504 
505   int32_t current_catch_;
506 
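  // Returns the try info of the innermost try block that can still catch,
  // as tracked by {current_catch_}.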
507   TryInfo* current_try_info() { return control_[current_catch_].try_info; }
508 
509   inline bool build() { return builder_ && ssa_env_->go(); }
510 
511   void InitSsaEnv() {
512     TFNode* start = nullptr;
513     SsaEnv* ssa_env = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
514     size_t size = sizeof(TFNode*) * EnvironmentCount();
515     ssa_env->state = SsaEnv::kReached;
516     ssa_env->locals =
517         size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr;
518 
519     if (builder_) {
520       start = builder_->Start(static_cast<int>(sig_->parameter_count() + 1));
521       // Initialize local variables.
522       uint32_t index = 0;
523       while (index < sig_->parameter_count()) {
524         ssa_env->locals[index] = builder_->Param(index, local_type_vec_[index]);
525         index++;
526       }
527       while (index < local_type_vec_.size()) {
528         LocalType type = local_type_vec_[index];
529         TFNode* node = DefaultValue(type);
530         while (index < local_type_vec_.size() &&
531                local_type_vec_[index] == type) {
532           // Do a whole run of like-typed locals at a time.
533           ssa_env->locals[index++] = node;
534         }
535       }
536       builder_->set_module(module_);
537     }
538     ssa_env->control = start;
539     ssa_env->effect = start;
540     SetEnv("initial", ssa_env);
541     if (builder_) {
542       builder_->StackCheck(position());
543     }
544   }
545 
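  // Returns the constant node used as the initial value for a local of the
  // given type.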
546   TFNode* DefaultValue(LocalType type) {
547     switch (type) {
548       case kAstI32:
549         return builder_->Int32Constant(0);
550       case kAstI64:
551         return builder_->Int64Constant(0);
552       case kAstF32:
553         return builder_->Float32Constant(0);
554       case kAstF64:
555         return builder_->Float64Constant(0);
556       case kAstS128:
557         return builder_->CreateS128Value(0);
558       default:
559         UNREACHABLE();
560         return nullptr;
561     }
562   }
563 
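  // Returns a whitespace string whose length tracks the value stack depth;
  // used only for decoder tracing.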
564   char* indentation() {
565     static const int kMaxIndent = 64;
566     static char bytes[kMaxIndent + 1];
567     for (int i = 0; i < kMaxIndent; ++i) bytes[i] = ' ';
568     bytes[kMaxIndent] = 0;
569     if (stack_.size() < kMaxIndent / 2) {
570       bytes[stack_.size() * 2] = 0;
571     }
572     return bytes;
573   }
574 
575   // Decodes the locals declarations, if any, populating {local_type_vec_}.
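  // For example, a declaration entry of {3, kLocalI32} followed by
  // {1, kLocalF64} adds three i32 locals and then one f64 local after the
  // parameters.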
576   void DecodeLocalDecls() {
577     DCHECK_EQ(0u, local_type_vec_.size());
578     // Initialize {local_type_vec_} from the signature.
579     if (sig_) {
580       local_type_vec_.reserve(sig_->parameter_count());
581       for (size_t i = 0; i < sig_->parameter_count(); ++i) {
582         local_type_vec_.push_back(sig_->GetParam(i));
583       }
584     }
585     // Decode local declarations, if any.
586     uint32_t entries = consume_u32v("local decls count");
587     TRACE("local decls count: %u\n", entries);
588     while (entries-- > 0 && pc_ < limit_) {
589       uint32_t count = consume_u32v("local count");
590       if (count > kMaxNumWasmLocals) {
591         error(pc_ - 1, "local count too large");
592         return;
593       }
594       byte code = consume_u8("local type");
595       LocalType type;
596       switch (code) {
597         case kLocalI32:
598           type = kAstI32;
599           break;
600         case kLocalI64:
601           type = kAstI64;
602           break;
603         case kLocalF32:
604           type = kAstF32;
605           break;
606         case kLocalF64:
607           type = kAstF64;
608           break;
609         case kLocalS128:
610           type = kAstS128;
611           break;
612         default:
613           error(pc_ - 1, "invalid local type");
614           return;
615       }
616       local_type_vec_.insert(local_type_vec_.end(), count, type);
617     }
618     total_locals_ = local_type_vec_.size();
619   }
620 
621   // Decodes the body of a function.
622   void DecodeFunctionBody() {
623     TRACE("wasm-decode %p...%p (module+%d, %d bytes) %s\n",
624           reinterpret_cast<const void*>(start_),
625           reinterpret_cast<const void*>(limit_), baserel(pc_),
626           static_cast<int>(limit_ - start_), builder_ ? "graph building" : "");
627 
628     {
629       // Set up initial function block.
630       SsaEnv* break_env = ssa_env_;
631       SetEnv("initial env", Steal(break_env));
632       PushBlock(break_env);
633       Control* c = &control_.back();
634       c->merge.arity = static_cast<uint32_t>(sig_->return_count());
635 
636       if (c->merge.arity == 1) {
637         c->merge.vals.first = {pc_, nullptr, sig_->GetReturn(0)};
638       } else if (c->merge.arity > 1) {
639         c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
640         for (unsigned i = 0; i < c->merge.arity; i++) {
641           c->merge.vals.array[i] = {pc_, nullptr, sig_->GetReturn(i)};
642         }
643       }
644     }
645 
646     if (pc_ >= limit_) return;  // Nothing to do.
647 
648     while (true) {  // decoding loop.
649       unsigned len = 1;
650       WasmOpcode opcode = static_cast<WasmOpcode>(*pc_);
651       if (!WasmOpcodes::IsPrefixOpcode(opcode)) {
652         TRACE("  @%-8d #%02x:%-20s|", startrel(pc_), opcode,
653               WasmOpcodes::ShortOpcodeName(opcode));
654       }
655 
656       FunctionSig* sig = WasmOpcodes::Signature(opcode);
657       if (sig) {
658         BuildSimpleOperator(opcode, sig);
659       } else {
660         // Complex bytecode.
661         switch (opcode) {
662           case kExprNop:
663             break;
664           case kExprBlock: {
665             // The break environment is the outer environment.
666             BlockTypeOperand operand(this, pc_);
667             SsaEnv* break_env = ssa_env_;
668             PushBlock(break_env);
669             SetEnv("block:start", Steal(break_env));
670             SetBlockType(&control_.back(), operand);
671             len = 1 + operand.length;
672             break;
673           }
674           case kExprThrow: {
675             CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
676             Value value = Pop(0, kAstI32);
677             BUILD(Throw, value.node);
678             break;
679           }
680           case kExprTry: {
681             CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
682             BlockTypeOperand operand(this, pc_);
683             SsaEnv* outer_env = ssa_env_;
684             SsaEnv* try_env = Steal(outer_env);
685             SsaEnv* catch_env = UnreachableEnv();
686             PushTry(outer_env, catch_env);
687             SetEnv("try_catch:start", try_env);
688             SetBlockType(&control_.back(), operand);
689             len = 1 + operand.length;
690             break;
691           }
692           case kExprCatch: {
693             CHECK_PROTOTYPE_OPCODE(wasm_eh_prototype);
694             LocalIndexOperand operand(this, pc_);
695             len = 1 + operand.length;
696 
697             if (control_.empty()) {
698               error("catch does not match any try");
699               break;
700             }
701 
702             Control* c = &control_.back();
703             if (!c->is_try()) {
704               error("catch does not match any try");
705               break;
706             }
707 
708             if (c->try_info->catch_env == nullptr) {
709               error(pc_, "catch already present for try with catch");
710               break;
711             }
712 
713             if (ssa_env_->go()) {
714               MergeValuesInto(c);
715             }
716             stack_.resize(c->stack_depth);
717 
718             DCHECK_NOT_NULL(c->try_info);
719             SsaEnv* catch_env = c->try_info->catch_env;
720             c->try_info->catch_env = nullptr;
721             SetEnv("catch:begin", catch_env);
722             current_catch_ = c->previous_catch;
723 
724             if (Validate(pc_, operand)) {
725               if (ssa_env_->locals) {
726                 TFNode* exception_as_i32 =
727                     BUILD(Catch, c->try_info->exception, position());
728                 ssa_env_->locals[operand.index] = exception_as_i32;
729               }
730             }
731 
732             break;
733           }
734           case kExprLoop: {
735             BlockTypeOperand operand(this, pc_);
736             SsaEnv* finish_try_env = Steal(ssa_env_);
737             // The continue environment is the inner environment.
738             SsaEnv* loop_body_env = PrepareForLoop(pc_, finish_try_env);
739             SetEnv("loop:start", loop_body_env);
740             ssa_env_->SetNotMerged();
741             PushLoop(finish_try_env);
742             SetBlockType(&control_.back(), operand);
743             len = 1 + operand.length;
744             break;
745           }
746           case kExprIf: {
747             // Condition on top of stack. Split environments for branches.
748             BlockTypeOperand operand(this, pc_);
749             Value cond = Pop(0, kAstI32);
750             TFNode* if_true = nullptr;
751             TFNode* if_false = nullptr;
752             BUILD(BranchNoHint, cond.node, &if_true, &if_false);
753             SsaEnv* end_env = ssa_env_;
754             SsaEnv* false_env = Split(ssa_env_);
755             false_env->control = if_false;
756             SsaEnv* true_env = Steal(ssa_env_);
757             true_env->control = if_true;
758             PushIf(end_env, false_env);
759             SetEnv("if:true", true_env);
760             SetBlockType(&control_.back(), operand);
761             len = 1 + operand.length;
762             break;
763           }
764           case kExprElse: {
765             if (control_.empty()) {
766               error("else does not match any if");
767               break;
768             }
769             Control* c = &control_.back();
770             if (!c->is_if()) {
771               error(pc_, c->pc, "else does not match an if");
772               break;
773             }
774             if (c->false_env == nullptr) {
775               error(pc_, c->pc, "else already present for if");
776               break;
777             }
778             FallThruTo(c);
779             // Switch to environment for false branch.
780             stack_.resize(c->stack_depth);
781             SetEnv("if_else:false", c->false_env);
782             c->false_env = nullptr;  // record that an else is already seen
783             break;
784           }
785           case kExprEnd: {
786             if (control_.empty()) {
787               error("end does not match any if, try, or block");
788               return;
789             }
790             const char* name = "block:end";
791             Control* c = &control_.back();
792             if (c->is_loop()) {
793               // A loop just leaves the values on the stack.
794               TypeCheckLoopFallThru(c);
795               PopControl();
796               SetEnv("loop:end", ssa_env_);
797               break;
798             }
799             if (c->is_if()) {
800               if (c->false_env != nullptr) {
801                 // End the true branch of a one-armed if.
802                 Goto(c->false_env, c->end_env);
803                 if (ssa_env_->go() &&
804                     static_cast<int>(stack_.size()) != c->stack_depth) {
805                   error("end of if expected empty stack");
806                   stack_.resize(c->stack_depth);
807                 }
808                 if (c->merge.arity > 0) {
809                   error("non-void one-armed if");
810                 }
811                 name = "if:merge";
812               } else {
813                 // End the false branch of a two-armed if.
814                 name = "if_else:merge";
815               }
816             } else if (c->is_try()) {
817               name = "try:end";
818 
819             // Validate that a catch was seen.
820               if (c->try_info->catch_env != nullptr) {
821                 error(pc_, "missing catch in try");
822                 break;
823               }
824             }
825             FallThruTo(c);
826             SetEnv(name, c->end_env);
827 
828             // Push the end values onto the stack.
829             stack_.resize(c->stack_depth);
830             if (c->merge.arity == 1) {
831               stack_.push_back(c->merge.vals.first);
832             } else {
833               for (unsigned i = 0; i < c->merge.arity; i++) {
834                 stack_.push_back(c->merge.vals.array[i]);
835               }
836             }
837 
838             PopControl();
839 
840             if (control_.empty()) {
841               // If the last (implicit) control was popped, check we are at end.
842               if (pc_ + 1 != end_) {
843                 error(pc_, pc_ + 1, "trailing code after function end");
844               }
845               last_end_found_ = true;
846               if (ssa_env_->go()) {
847                 // The result of the block is the return value.
848                 TRACE("  @%-8d #xx:%-20s|", startrel(pc_), "ImplicitReturn");
849                 DoReturn();
850                 TRACE("\n");
851               }
852               return;
853             }
854             break;
855           }
856           case kExprSelect: {
857             Value cond = Pop(2, kAstI32);
858             Value fval = Pop();
859             Value tval = Pop();
860             if (tval.type == kAstStmt || tval.type != fval.type) {
861               if (tval.type != kAstEnd && fval.type != kAstEnd) {
862                 error("type mismatch in select");
863                 break;
864               }
865             }
866             if (build()) {
867               DCHECK(tval.type != kAstEnd);
868               DCHECK(fval.type != kAstEnd);
869               DCHECK(cond.type != kAstEnd);
870               TFNode* controls[2];
871               builder_->BranchNoHint(cond.node, &controls[0], &controls[1]);
872               TFNode* merge = builder_->Merge(2, controls);
873               TFNode* vals[2] = {tval.node, fval.node};
874               TFNode* phi = builder_->Phi(tval.type, 2, vals, merge);
875               Push(tval.type, phi);
876               ssa_env_->control = merge;
877             } else {
878               Push(tval.type, nullptr);
879             }
880             break;
881           }
882           case kExprBr: {
883             BreakDepthOperand operand(this, pc_);
884             if (Validate(pc_, operand, control_)) {
885               BreakTo(operand.depth);
886             }
887             len = 1 + operand.length;
888             EndControl();
889             break;
890           }
891           case kExprBrIf: {
892             BreakDepthOperand operand(this, pc_);
893             Value cond = Pop(0, kAstI32);
894             if (ok() && Validate(pc_, operand, control_)) {
895               SsaEnv* fenv = ssa_env_;
896               SsaEnv* tenv = Split(fenv);
897               fenv->SetNotMerged();
898               BUILD(BranchNoHint, cond.node, &tenv->control, &fenv->control);
899               ssa_env_ = tenv;
900               BreakTo(operand.depth);
901               ssa_env_ = fenv;
902             }
903             len = 1 + operand.length;
904             break;
905           }
906           case kExprBrTable: {
907             BranchTableOperand operand(this, pc_);
908             BranchTableIterator iterator(this, operand);
909             if (Validate(pc_, operand, control_.size())) {
910               Value key = Pop(0, kAstI32);
911               if (failed()) break;
912 
913               SsaEnv* break_env = ssa_env_;
914               if (operand.table_count > 0) {
915                 // Build branches to the various blocks based on the table.
916                 TFNode* sw = BUILD(Switch, operand.table_count + 1, key.node);
917 
918                 SsaEnv* copy = Steal(break_env);
919                 ssa_env_ = copy;
920                 while (ok() && iterator.has_next()) {
921                   uint32_t i = iterator.cur_index();
922                   const byte* pos = iterator.pc();
923                   uint32_t target = iterator.next();
924                   if (target >= control_.size()) {
925                     error(pos, "improper branch in br_table");
926                     break;
927                   }
928                   ssa_env_ = Split(copy);
929                   ssa_env_->control = (i == operand.table_count)
930                                           ? BUILD(IfDefault, sw)
931                                           : BUILD(IfValue, i, sw);
932                   BreakTo(target);
933                 }
934                 if (failed()) break;
935               } else {
936                 // Only a default target. Do the equivalent of br.
937                 const byte* pos = iterator.pc();
938                 uint32_t target = iterator.next();
939                 if (target >= control_.size()) {
940                   error(pos, "improper branch in br_table");
941                   break;
942                 }
943                 BreakTo(target);
944               }
945               // br_table ends the control flow like br.
946               ssa_env_ = break_env;
947             }
948             len = 1 + iterator.length();
949             break;
950           }
951           case kExprReturn: {
952             DoReturn();
953             break;
954           }
955           case kExprUnreachable: {
956             BUILD(Unreachable, position());
957             EndControl();
958             break;
959           }
960           case kExprI8Const: {
961             ImmI8Operand operand(this, pc_);
962             Push(kAstI32, BUILD(Int32Constant, operand.value));
963             len = 1 + operand.length;
964             break;
965           }
966           case kExprI32Const: {
967             ImmI32Operand operand(this, pc_);
968             Push(kAstI32, BUILD(Int32Constant, operand.value));
969             len = 1 + operand.length;
970             break;
971           }
972           case kExprI64Const: {
973             ImmI64Operand operand(this, pc_);
974             Push(kAstI64, BUILD(Int64Constant, operand.value));
975             len = 1 + operand.length;
976             break;
977           }
978           case kExprF32Const: {
979             ImmF32Operand operand(this, pc_);
980             Push(kAstF32, BUILD(Float32Constant, operand.value));
981             len = 1 + operand.length;
982             break;
983           }
984           case kExprF64Const: {
985             ImmF64Operand operand(this, pc_);
986             Push(kAstF64, BUILD(Float64Constant, operand.value));
987             len = 1 + operand.length;
988             break;
989           }
990           case kExprGetLocal: {
991             LocalIndexOperand operand(this, pc_);
992             if (Validate(pc_, operand)) {
993               if (build()) {
994                 Push(operand.type, ssa_env_->locals[operand.index]);
995               } else {
996                 Push(operand.type, nullptr);
997               }
998             }
999             len = 1 + operand.length;
1000             break;
1001           }
1002           case kExprSetLocal: {
1003             LocalIndexOperand operand(this, pc_);
1004             if (Validate(pc_, operand)) {
1005               Value val = Pop(0, local_type_vec_[operand.index]);
1006               if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
1007             }
1008             len = 1 + operand.length;
1009             break;
1010           }
1011           case kExprTeeLocal: {
1012             LocalIndexOperand operand(this, pc_);
1013             if (Validate(pc_, operand)) {
1014               Value val = Pop(0, local_type_vec_[operand.index]);
1015               if (ssa_env_->locals) ssa_env_->locals[operand.index] = val.node;
1016               Push(val.type, val.node);
1017             }
1018             len = 1 + operand.length;
1019             break;
1020           }
1021           case kExprDrop: {
1022             Pop();
1023             break;
1024           }
1025           case kExprGetGlobal: {
1026             GlobalIndexOperand operand(this, pc_);
1027             if (Validate(pc_, operand)) {
1028               Push(operand.type, BUILD(GetGlobal, operand.index));
1029             }
1030             len = 1 + operand.length;
1031             break;
1032           }
1033           case kExprSetGlobal: {
1034             GlobalIndexOperand operand(this, pc_);
1035             if (Validate(pc_, operand)) {
1036               if (operand.global->mutability) {
1037                 Value val = Pop(0, operand.type);
1038                 BUILD(SetGlobal, operand.index, val.node);
1039               } else {
1040                 error(pc_, pc_ + 1, "immutable global #%u cannot be assigned",
1041                       operand.index);
1042               }
1043             }
1044             len = 1 + operand.length;
1045             break;
1046           }
1047           case kExprI32LoadMem8S:
1048             len = DecodeLoadMem(kAstI32, MachineType::Int8());
1049             break;
1050           case kExprI32LoadMem8U:
1051             len = DecodeLoadMem(kAstI32, MachineType::Uint8());
1052             break;
1053           case kExprI32LoadMem16S:
1054             len = DecodeLoadMem(kAstI32, MachineType::Int16());
1055             break;
1056           case kExprI32LoadMem16U:
1057             len = DecodeLoadMem(kAstI32, MachineType::Uint16());
1058             break;
1059           case kExprI32LoadMem:
1060             len = DecodeLoadMem(kAstI32, MachineType::Int32());
1061             break;
1062           case kExprI64LoadMem8S:
1063             len = DecodeLoadMem(kAstI64, MachineType::Int8());
1064             break;
1065           case kExprI64LoadMem8U:
1066             len = DecodeLoadMem(kAstI64, MachineType::Uint8());
1067             break;
1068           case kExprI64LoadMem16S:
1069             len = DecodeLoadMem(kAstI64, MachineType::Int16());
1070             break;
1071           case kExprI64LoadMem16U:
1072             len = DecodeLoadMem(kAstI64, MachineType::Uint16());
1073             break;
1074           case kExprI64LoadMem32S:
1075             len = DecodeLoadMem(kAstI64, MachineType::Int32());
1076             break;
1077           case kExprI64LoadMem32U:
1078             len = DecodeLoadMem(kAstI64, MachineType::Uint32());
1079             break;
1080           case kExprI64LoadMem:
1081             len = DecodeLoadMem(kAstI64, MachineType::Int64());
1082             break;
1083           case kExprF32LoadMem:
1084             len = DecodeLoadMem(kAstF32, MachineType::Float32());
1085             break;
1086           case kExprF64LoadMem:
1087             len = DecodeLoadMem(kAstF64, MachineType::Float64());
1088             break;
1089           case kExprI32StoreMem8:
1090             len = DecodeStoreMem(kAstI32, MachineType::Int8());
1091             break;
1092           case kExprI32StoreMem16:
1093             len = DecodeStoreMem(kAstI32, MachineType::Int16());
1094             break;
1095           case kExprI32StoreMem:
1096             len = DecodeStoreMem(kAstI32, MachineType::Int32());
1097             break;
1098           case kExprI64StoreMem8:
1099             len = DecodeStoreMem(kAstI64, MachineType::Int8());
1100             break;
1101           case kExprI64StoreMem16:
1102             len = DecodeStoreMem(kAstI64, MachineType::Int16());
1103             break;
1104           case kExprI64StoreMem32:
1105             len = DecodeStoreMem(kAstI64, MachineType::Int32());
1106             break;
1107           case kExprI64StoreMem:
1108             len = DecodeStoreMem(kAstI64, MachineType::Int64());
1109             break;
1110           case kExprF32StoreMem:
1111             len = DecodeStoreMem(kAstF32, MachineType::Float32());
1112             break;
1113           case kExprF64StoreMem:
1114             len = DecodeStoreMem(kAstF64, MachineType::Float64());
1115             break;
1116           case kExprGrowMemory: {
1117             MemoryIndexOperand operand(this, pc_);
1118             if (module_->origin != kAsmJsOrigin) {
1119               Value val = Pop(0, kAstI32);
1120               Push(kAstI32, BUILD(GrowMemory, val.node));
1121             } else {
1122               error("grow_memory is not supported for asmjs modules");
1123             }
1124             len = 1 + operand.length;
1125             break;
1126           }
1127           case kExprMemorySize: {
1128             MemoryIndexOperand operand(this, pc_);
1129             Push(kAstI32, BUILD(CurrentMemoryPages));
1130             len = 1 + operand.length;
1131             break;
1132           }
1133           case kExprCallFunction: {
1134             CallFunctionOperand operand(this, pc_);
1135             if (Validate(pc_, operand)) {
1136               TFNode** buffer = PopArgs(operand.sig);
1137               TFNode** rets = nullptr;
1138               BUILD(CallDirect, operand.index, buffer, &rets, position());
1139               PushReturns(operand.sig, rets);
1140             }
1141             len = 1 + operand.length;
1142             break;
1143           }
1144           case kExprCallIndirect: {
1145             CallIndirectOperand operand(this, pc_);
1146             if (Validate(pc_, operand)) {
1147               Value index = Pop(0, kAstI32);
1148               TFNode** buffer = PopArgs(operand.sig);
1149               if (buffer) buffer[0] = index.node;
1150               TFNode** rets = nullptr;
1151               BUILD(CallIndirect, operand.index, buffer, &rets, position());
1152               PushReturns(operand.sig, rets);
1153             }
1154             len = 1 + operand.length;
1155             break;
1156           }
1157           case kSimdPrefix: {
1158             CHECK_PROTOTYPE_OPCODE(wasm_simd_prototype);
1159             len++;
1160             byte simd_index = checked_read_u8(pc_, 1, "simd index");
1161             opcode = static_cast<WasmOpcode>(opcode << 8 | simd_index);
1162             TRACE("  @%-4d #%02x #%02x:%-20s|", startrel(pc_), kSimdPrefix,
1163                   simd_index, WasmOpcodes::ShortOpcodeName(opcode));
1164             len += DecodeSimdOpcode(opcode);
1165             break;
1166           }
1167           case kAtomicPrefix: {
1168             if (!module_ || module_->origin != kAsmJsOrigin) {
1169               error("Atomics are allowed only in AsmJs modules");
1170               break;
1171             }
1172             if (!FLAG_wasm_atomics_prototype) {
1173               error("Invalid opcode (enable with --wasm_atomics_prototype)");
1174               break;
1175             }
1176             len = 2;
1177             byte atomic_opcode = checked_read_u8(pc_, 1, "atomic index");
1178             opcode = static_cast<WasmOpcode>(opcode << 8 | atomic_opcode);
1179             sig = WasmOpcodes::AtomicSignature(opcode);
1180             if (sig) {
1181               BuildAtomicOperator(opcode);
1182             }
1183             break;
1184           }
1185           default: {
1186             // Deal with special asmjs opcodes.
1187             if (module_ && module_->origin == kAsmJsOrigin) {
1188               sig = WasmOpcodes::AsmjsSignature(opcode);
1189               if (sig) {
1190                 BuildSimpleOperator(opcode, sig);
1191               }
1192             } else {
1193               error("Invalid opcode");
1194               return;
1195             }
1196           }
1197         }
1198       }
1199 
1200 #if DEBUG
1201       if (FLAG_trace_wasm_decoder) {
1202         for (size_t i = 0; i < stack_.size(); ++i) {
1203           Value& val = stack_[i];
1204           WasmOpcode opcode = static_cast<WasmOpcode>(*val.pc);
1205           if (WasmOpcodes::IsPrefixOpcode(opcode)) {
1206             opcode = static_cast<WasmOpcode>(opcode << 8 | *(val.pc + 1));
1207           }
1208           PrintF(" %c@%d:%s", WasmOpcodes::ShortNameOf(val.type),
1209                  static_cast<int>(val.pc - start_),
1210                  WasmOpcodes::ShortOpcodeName(opcode));
1211           switch (opcode) {
1212             case kExprI32Const: {
1213               ImmI32Operand operand(this, val.pc);
1214               PrintF("[%d]", operand.value);
1215               break;
1216             }
1217             case kExprGetLocal: {
1218               LocalIndexOperand operand(this, val.pc);
1219               PrintF("[%u]", operand.index);
1220               break;
1221             }
1222             case kExprSetLocal:  // fallthru
1223             case kExprTeeLocal: {
1224               LocalIndexOperand operand(this, val.pc);
1225               PrintF("[%u]", operand.index);
1226               break;
1227             }
1228             default:
1229               break;
1230           }
1231         }
1232         PrintF("\n");
1233       }
1234 #endif
1235       pc_ += len;
1236       if (pc_ >= limit_) {
1237         // End of code reached or exceeded.
1238         if (pc_ > limit_ && ok()) error("Beyond end of code");
1239         return;
1240       }
1241     }  // end decode loop
1242   }
1243 
1244   void EndControl() { ssa_env_->Kill(SsaEnv::kControlEnd); }
1245 
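  // Initializes the merge values of control construct {c} from the arity and
  // types given by the block type operand.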
1246   void SetBlockType(Control* c, BlockTypeOperand& operand) {
1247     c->merge.arity = operand.arity;
1248     if (c->merge.arity == 1) {
1249       c->merge.vals.first = {pc_, nullptr, operand.read_entry(0)};
1250     } else if (c->merge.arity > 1) {
1251       c->merge.vals.array = zone_->NewArray<Value>(c->merge.arity);
1252       for (unsigned i = 0; i < c->merge.arity; i++) {
1253         c->merge.vals.array[i] = {pc_, nullptr, operand.read_entry(i)};
1254       }
1255     }
1256   }
1257 
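  // Pops the arguments for a call with signature {sig} off the value stack,
  // in reverse order; slot 0 of the returned buffer is left for the call
  // target.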
1258   TFNode** PopArgs(FunctionSig* sig) {
1259     if (build()) {
1260       int count = static_cast<int>(sig->parameter_count());
1261       TFNode** buffer = builder_->Buffer(count + 1);
1262       buffer[0] = nullptr;  // reserved for code object or function index.
1263       for (int i = count - 1; i >= 0; i--) {
1264         buffer[i + 1] = Pop(i, sig->GetParam(i)).node;
1265       }
1266       return buffer;
1267     } else {
1268       int count = static_cast<int>(sig->parameter_count());
1269       for (int i = count - 1; i >= 0; i--) {
1270         Pop(i, sig->GetParam(i));
1271       }
1272       return nullptr;
1273     }
1274   }
1275 
1276   LocalType GetReturnType(FunctionSig* sig) {
1277     return sig->return_count() == 0 ? kAstStmt : sig->GetReturn();
1278   }
1279 
1280   void PushBlock(SsaEnv* end_env) {
1281     const int stack_depth = static_cast<int>(stack_.size());
1282     control_.emplace_back(
1283         Control::Block(pc_, stack_depth, end_env, current_catch_));
1284   }
1285 
1286   void PushLoop(SsaEnv* end_env) {
1287     const int stack_depth = static_cast<int>(stack_.size());
1288     control_.emplace_back(
1289         Control::Loop(pc_, stack_depth, end_env, current_catch_));
1290   }
1291 
1292   void PushIf(SsaEnv* end_env, SsaEnv* false_env) {
1293     const int stack_depth = static_cast<int>(stack_.size());
1294     control_.emplace_back(
1295         Control::If(pc_, stack_depth, end_env, false_env, current_catch_));
1296   }
1297 
1298   void PushTry(SsaEnv* end_env, SsaEnv* catch_env) {
1299     const int stack_depth = static_cast<int>(stack_.size());
1300     control_.emplace_back(Control::Try(pc_, stack_depth, end_env, zone_,
1301                                        catch_env, current_catch_));
1302     current_catch_ = static_cast<int32_t>(control_.size() - 1);
1303   }
1304 
1305   void PopControl() { control_.pop_back(); }
1306 
1307   int DecodeLoadMem(LocalType type, MachineType mem_type) {
1308     MemoryAccessOperand operand(this, pc_,
1309                                 ElementSizeLog2Of(mem_type.representation()));
1310 
1311     Value index = Pop(0, kAstI32);
1312     TFNode* node = BUILD(LoadMem, type, mem_type, index.node, operand.offset,
1313                          operand.alignment, position());
1314     Push(type, node);
1315     return 1 + operand.length;
1316   }
1317 
1318   int DecodeStoreMem(LocalType type, MachineType mem_type) {
1319     MemoryAccessOperand operand(this, pc_,
1320                                 ElementSizeLog2Of(mem_type.representation()));
1321     Value val = Pop(1, type);
1322     Value index = Pop(0, kAstI32);
1323     BUILD(StoreMem, mem_type, index.node, operand.offset, operand.alignment,
1324           val.node, position());
1325     return 1 + operand.length;
1326   }
1327 
1328   unsigned ExtractLane(WasmOpcode opcode, LocalType type) {
1329     LaneOperand operand(this, pc_);
1330     if (Validate(pc_, operand)) {
1331       TFNode* input = Pop(0, LocalType::kSimd128).node;
1332       TFNode* node = BUILD(SimdExtractLane, opcode, operand.lane, input);
1333       Push(type, node);
1334     }
1335     return operand.length;
1336   }
1337 
1338   unsigned DecodeSimdOpcode(WasmOpcode opcode) {
1339     unsigned len = 0;
1340     switch (opcode) {
1341       case kExprI32x4ExtractLane: {
1342         len = ExtractLane(opcode, LocalType::kWord32);
1343         break;
1344       }
1345       case kExprF32x4ExtractLane: {
1346         len = ExtractLane(opcode, LocalType::kFloat32);
1347         break;
1348       }
1349       default: {
1350         FunctionSig* sig = WasmOpcodes::Signature(opcode);
1351         if (sig != nullptr) {
1352           compiler::NodeVector inputs(sig->parameter_count(), zone_);
1353           for (size_t i = sig->parameter_count(); i > 0; i--) {
1354             Value val = Pop(static_cast<int>(i - 1), sig->GetParam(i - 1));
1355             inputs[i - 1] = val.node;
1356           }
1357           TFNode* node = BUILD(SimdOp, opcode, inputs);
1358           Push(GetReturnType(sig), node);
1359         } else {
1360           error("invalid simd opcode");
1361         }
1362       }
1363     }
1364     return len;
1365   }
1366 
1367   void BuildAtomicOperator(WasmOpcode opcode) { UNIMPLEMENTED(); }
1368 
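  // Pops the return values off the stack, builds the return node, and ends
  // the current control.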
1369   void DoReturn() {
1370     int count = static_cast<int>(sig_->return_count());
1371     TFNode** buffer = nullptr;
1372     if (build()) buffer = builder_->Buffer(count);
1373 
1374     // Pop return values off the stack in reverse order.
1375     for (int i = count - 1; i >= 0; i--) {
1376       Value val = Pop(i, sig_->GetReturn(i));
1377       if (buffer) buffer[i] = val.node;
1378     }
1379 
1380     BUILD(Return, count, buffer);
1381     EndControl();
1382   }
1383 
1384   void Push(LocalType type, TFNode* node) {
1385     if (type != kAstStmt && type != kAstEnd) {
1386       stack_.push_back({pc_, node, type});
1387     }
1388   }
1389 
1390   void PushReturns(FunctionSig* sig, TFNode** rets) {
1391     for (size_t i = 0; i < sig->return_count(); i++) {
1392       // When only verifying, {rets} will be null, so push null values.
1393       Push(sig->GetReturn(i), rets ? rets[i] : nullptr);
1394     }
1395   }
1396 
1397   const char* SafeOpcodeNameAt(const byte* pc) {
1398     if (pc >= end_) return "<end>";
1399     return WasmOpcodes::ShortOpcodeName(static_cast<WasmOpcode>(*pc));
1400   }
1401 
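  // Pops a value and checks that it has the {expected} type, reporting a
  // type error (with the operand {index}) on mismatch.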
1402   Value Pop(int index, LocalType expected) {
1403     if (!ssa_env_->go()) {
1404       // Unreachable code is essentially not typechecked.
1405       return {pc_, nullptr, expected};
1406     }
1407     Value val = Pop();
1408     if (val.type != expected) {
1409       if (val.type != kAstEnd) {
1410         error(pc_, val.pc, "%s[%d] expected type %s, found %s of type %s",
1411               SafeOpcodeNameAt(pc_), index, WasmOpcodes::TypeName(expected),
1412               SafeOpcodeNameAt(val.pc), WasmOpcodes::TypeName(val.type));
1413       }
1414     }
1415     return val;
1416   }
1417 
1418   Value Pop() {
1419     if (!ssa_env_->go()) {
1420       // Unreachable code is essentially not typechecked.
1421       return {pc_, nullptr, kAstEnd};
1422     }
1423     size_t limit = control_.empty() ? 0 : control_.back().stack_depth;
1424     if (stack_.size() <= limit) {
1425       Value val = {pc_, nullptr, kAstStmt};
1426       error(pc_, pc_, "%s found empty stack", SafeOpcodeNameAt(pc_));
1427       return val;
1428     }
1429     Value val = stack_.back();
1430     stack_.pop_back();
1431     return val;
1432   }
1433 
1434   Value PopUpTo(int stack_depth) {
1435     if (!ssa_env_->go()) {
1436       // Unreachable code is essentially not typechecked.
1437       return {pc_, nullptr, kAstEnd};
1438     }
1439     if (stack_depth == static_cast<int>(stack_.size())) {
1440       Value val = {pc_, nullptr, kAstStmt};
1441       return val;
1442     } else {
1443       DCHECK_LE(stack_depth, static_cast<int>(stack_.size()));
1444       Value val = Pop();
1445       stack_.resize(stack_depth);
1446       return val;
1447     }
1448   }
1449 
1450   int baserel(const byte* ptr) {
1451     return base_ ? static_cast<int>(ptr - base_) : 0;
1452   }
1453 
1454   int startrel(const byte* ptr) { return static_cast<int>(ptr - start_); }
1455 
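  // Branches to the control construct {depth} levels up the control stack;
  // for non-loop targets the branch values are merged into the target's end
  // environment.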
1456   void BreakTo(unsigned depth) {
1457     if (!ssa_env_->go()) return;
1458     Control* c = &control_[control_.size() - depth - 1];
1459     if (c->is_loop()) {
1460       // This is the inner loop block, which does not have a value.
1461       Goto(ssa_env_, c->end_env);
1462     } else {
1463       // Merge the value(s) into the end of the block.
1464       if (c->stack_depth + c->merge.arity > stack_.size()) {
1465         error(
1466             pc_, pc_,
1467             "expected at least %d values on the stack for br to @%d, found %d",
1468             c->merge.arity, startrel(c->pc),
1469             static_cast<int>(stack_.size() - c->stack_depth));
1470         return;
1471       }
1472       MergeValuesInto(c);
1473     }
1474   }
1475 
  void FallThruTo(Control* c) {
    if (!ssa_env_->go()) return;
    // Merge the value(s) into the end of the block.
    int arity = static_cast<int>(c->merge.arity);
    if (c->stack_depth + arity != static_cast<int>(stack_.size())) {
      error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
            arity, startrel(c->pc));
      return;
    }
    MergeValuesInto(c);
  }

  inline Value& GetMergeValueFromStack(Control* c, int i) {
    return stack_[stack_.size() - c->merge.arity + i];
  }

  void TypeCheckLoopFallThru(Control* c) {
    if (!ssa_env_->go()) return;
    // Fallthru must match arity exactly.
    int arity = static_cast<int>(c->merge.arity);
    if (c->stack_depth + arity != static_cast<int>(stack_.size())) {
      error(pc_, pc_, "expected %d elements on the stack for fallthru to @%d",
            arity, startrel(c->pc));
      return;
    }
    // Typecheck the values left on the stack.
    for (unsigned i = 0; i < c->merge.arity; i++) {
      Value& val = GetMergeValueFromStack(c, i);
      Value& old =
          c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
      if (val.type != old.type) {
        error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
              WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
        return;
      }
    }
  }

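  // Merges the values on top of the stack into the merge region of {c},
  // typechecking them and creating or extending phis as needed.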
  void MergeValuesInto(Control* c) {
    SsaEnv* target = c->end_env;
    bool first = target->state == SsaEnv::kUnreachable;
    Goto(ssa_env_, target);

    for (unsigned i = 0; i < c->merge.arity; i++) {
      Value& val = GetMergeValueFromStack(c, i);
      Value& old =
          c->merge.arity == 1 ? c->merge.vals.first : c->merge.vals.array[i];
      if (val.type != old.type) {
        error(pc_, pc_, "type error in merge[%d] (expected %s, got %s)", i,
              WasmOpcodes::TypeName(old.type), WasmOpcodes::TypeName(val.type));
        return;
      }
      if (builder_) {
        old.node =
            first ? val.node : CreateOrMergeIntoPhi(old.type, target->control,
                                                    old.node, val.node);
      } else {
        old.node = nullptr;
      }
    }
  }

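  // Switches the decoder to the given SSA environment and points the graph
  // builder at its control and effect dependencies.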
  void SetEnv(const char* reason, SsaEnv* env) {
#if DEBUG
    if (FLAG_trace_wasm_decoder) {
      char state = 'X';
      if (env) {
        switch (env->state) {
          case SsaEnv::kReached:
            state = 'R';
            break;
          case SsaEnv::kUnreachable:
            state = 'U';
            break;
          case SsaEnv::kMerged:
            state = 'M';
            break;
          case SsaEnv::kControlEnd:
            state = 'E';
            break;
        }
      }
      PrintF("  env = %p, state = %c, reason = %s", static_cast<void*>(env),
             state, reason);
      if (env && env->control) {
        PrintF(", control = ");
        compiler::WasmGraphBuilder::PrintDebugName(env->control);
      }
      PrintF("\n");
    }
#endif
    ssa_env_ = env;
    if (builder_) {
      builder_->set_control_ptr(&env->control);
      builder_->set_effect_ptr(&env->effect);
    }
  }

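  // If inside a try scope, splits control flow after the possibly-throwing
  // {node} and routes the exception edge to the enclosing catch environment.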
  TFNode* CheckForException(TFNode* node) {
    if (node == nullptr) {
      return nullptr;
    }

    const bool inside_try_scope = current_catch_ != kNullCatch;

    if (!inside_try_scope) {
      return node;
    }

    TFNode* if_success = nullptr;
    TFNode* if_exception = nullptr;
    if (!builder_->ThrowsException(node, &if_success, &if_exception)) {
      return node;
    }

    SsaEnv* success_env = Steal(ssa_env_);
    success_env->control = if_success;

    SsaEnv* exception_env = Split(success_env);
    exception_env->control = if_exception;
    TryInfo* try_info = current_try_info();
    Goto(exception_env, try_info->catch_env);
    TFNode* exception = try_info->exception;
    if (exception == nullptr) {
      DCHECK_EQ(SsaEnv::kReached, try_info->catch_env->state);
      try_info->exception = if_exception;
    } else {
      DCHECK_EQ(SsaEnv::kMerged, try_info->catch_env->state);
      try_info->exception =
          CreateOrMergeIntoPhi(kAstI32, try_info->catch_env->control,
                               try_info->exception, if_exception);
    }

    SetEnv("if_success", success_env);
    return node;
  }

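  // Merges the SSA environment {from} into {to}, creating merge and phi nodes
  // as necessary, and kills {from}.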
  void Goto(SsaEnv* from, SsaEnv* to) {
    DCHECK_NOT_NULL(to);
    if (!from->go()) return;
    switch (to->state) {
      case SsaEnv::kUnreachable: {  // Overwrite destination.
        to->state = SsaEnv::kReached;
        to->locals = from->locals;
        to->control = from->control;
        to->effect = from->effect;
        break;
      }
      case SsaEnv::kReached: {  // Create a new merge.
        to->state = SsaEnv::kMerged;
        if (!builder_) break;
        // Merge control.
        TFNode* controls[] = {to->control, from->control};
        TFNode* merge = builder_->Merge(2, controls);
        to->control = merge;
        // Merge effects.
        if (from->effect != to->effect) {
          TFNode* effects[] = {to->effect, from->effect, merge};
          to->effect = builder_->EffectPhi(2, effects, merge);
        }
        // Merge SSA values.
        for (int i = EnvironmentCount() - 1; i >= 0; i--) {
          TFNode* a = to->locals[i];
          TFNode* b = from->locals[i];
          if (a != b) {
            TFNode* vals[] = {a, b};
            to->locals[i] = builder_->Phi(local_type_vec_[i], 2, vals, merge);
          }
        }
        break;
      }
      case SsaEnv::kMerged: {
        if (!builder_) break;
        TFNode* merge = to->control;
        // Extend the existing merge.
        builder_->AppendToMerge(merge, from->control);
        // Merge effects.
        if (builder_->IsPhiWithMerge(to->effect, merge)) {
          builder_->AppendToPhi(to->effect, from->effect);
        } else if (to->effect != from->effect) {
          uint32_t count = builder_->InputCount(merge);
          TFNode** effects = builder_->Buffer(count);
          for (uint32_t j = 0; j < count - 1; j++) {
            effects[j] = to->effect;
          }
          effects[count - 1] = from->effect;
          to->effect = builder_->EffectPhi(count, effects, merge);
        }
        // Merge locals.
        for (int i = EnvironmentCount() - 1; i >= 0; i--) {
          TFNode* tnode = to->locals[i];
          TFNode* fnode = from->locals[i];
          if (builder_->IsPhiWithMerge(tnode, merge)) {
            builder_->AppendToPhi(tnode, fnode);
          } else if (tnode != fnode) {
            uint32_t count = builder_->InputCount(merge);
            TFNode** vals = builder_->Buffer(count);
            for (uint32_t j = 0; j < count - 1; j++) {
              vals[j] = tnode;
            }
            vals[count - 1] = fnode;
            to->locals[i] =
                builder_->Phi(local_type_vec_[i], count, vals, merge);
          }
        }
        break;
      }
      default:
        UNREACHABLE();
    }
    return from->Kill();
  }

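  // Extends an existing phi at {merge} with {fnode}, or creates a new phi
  // combining {tnode} and {fnode}; returns the resulting node.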
  TFNode* CreateOrMergeIntoPhi(LocalType type, TFNode* merge, TFNode* tnode,
                               TFNode* fnode) {
    DCHECK_NOT_NULL(builder_);
    if (builder_->IsPhiWithMerge(tnode, merge)) {
      builder_->AppendToPhi(tnode, fnode);
    } else if (tnode != fnode) {
      uint32_t count = builder_->InputCount(merge);
      TFNode** vals = builder_->Buffer(count);
      for (uint32_t j = 0; j < count - 1; j++) vals[j] = tnode;
      vals[count - 1] = fnode;
      return builder_->Phi(type, count, vals, merge);
    }
    return tnode;
  }

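  // Turns {env} into a loop header: introduces a loop node and an effect phi,
  // adds phis for locals assigned in the loop (or all locals, conservatively),
  // and returns a split environment for the loop body.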
  SsaEnv* PrepareForLoop(const byte* pc, SsaEnv* env) {
    if (!builder_) return Split(env);
    if (!env->go()) return Split(env);
    env->state = SsaEnv::kMerged;

    env->control = builder_->Loop(env->control);
    env->effect = builder_->EffectPhi(1, &env->effect, env->control);
    builder_->Terminate(env->effect, env->control);
    if (FLAG_wasm_loop_assignment_analysis) {
      BitVector* assigned = AnalyzeLoopAssignment(pc);
      if (failed()) return env;
      if (assigned != nullptr) {
        // Only introduce phis for variables assigned in this loop.
        for (int i = EnvironmentCount() - 1; i >= 0; i--) {
          if (!assigned->Contains(i)) continue;
          env->locals[i] = builder_->Phi(local_type_vec_[i], 1, &env->locals[i],
                                         env->control);
        }
        SsaEnv* loop_body_env = Split(env);
        builder_->StackCheck(position(), &(loop_body_env->effect),
                             &(loop_body_env->control));
        return loop_body_env;
      }
    }

    // Conservatively introduce phis for all local variables.
    for (int i = EnvironmentCount() - 1; i >= 0; i--) {
      env->locals[i] =
          builder_->Phi(local_type_vec_[i], 1, &env->locals[i], env->control);
    }

    SsaEnv* loop_body_env = Split(env);
    builder_->StackCheck(position(), &(loop_body_env->effect),
                         &(loop_body_env->control));
    return loop_body_env;
  }

  // Create a complete copy of {from}.
  SsaEnv* Split(SsaEnv* from) {
    DCHECK_NOT_NULL(from);
    SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
    size_t size = sizeof(TFNode*) * EnvironmentCount();
    result->control = from->control;
    result->effect = from->effect;

    if (from->go()) {
      result->state = SsaEnv::kReached;
      result->locals =
          size > 0 ? reinterpret_cast<TFNode**>(zone_->New(size)) : nullptr;
      memcpy(result->locals, from->locals, size);
    } else {
      result->state = SsaEnv::kUnreachable;
      result->locals = nullptr;
    }

    return result;
  }

  // Create a copy of {from} that steals its state and leaves {from}
  // unreachable.
  SsaEnv* Steal(SsaEnv* from) {
    DCHECK_NOT_NULL(from);
    if (!from->go()) return UnreachableEnv();
    SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
    result->state = SsaEnv::kReached;
    result->locals = from->locals;
    result->control = from->control;
    result->effect = from->effect;
    from->Kill(SsaEnv::kUnreachable);
    return result;
  }

  // Create an unreachable environment.
  SsaEnv* UnreachableEnv() {
    SsaEnv* result = reinterpret_cast<SsaEnv*>(zone_->New(sizeof(SsaEnv)));
    result->state = SsaEnv::kUnreachable;
    result->control = nullptr;
    result->effect = nullptr;
    result->locals = nullptr;
    return result;
  }

  int EnvironmentCount() {
    if (builder_) return static_cast<int>(local_type_vec_.size());
    return 0;  // if we aren't building a graph, don't bother with SSA renaming.
  }

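  // Called on the first decoding error; stops the decoding loop and disables
  // further graph building.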
  virtual void onFirstError() {
    limit_ = start_;     // Terminate decoding loop.
    builder_ = nullptr;  // Don't build any more nodes.
    TRACE(" !%s\n", error_msg_.get());
  }
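
  // Computes the set of locals assigned inside the loop starting at {pc};
  // returns null if {pc} does not start a loop or decoding failed.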
  BitVector* AnalyzeLoopAssignment(const byte* pc) {
    if (pc >= limit_) return nullptr;
    if (*pc != kExprLoop) return nullptr;

    BitVector* assigned =
        new (zone_) BitVector(static_cast<int>(local_type_vec_.size()), zone_);
    int depth = 0;
    // Iteratively process all AST nodes nested inside the loop.
    while (pc < limit_ && ok()) {
      WasmOpcode opcode = static_cast<WasmOpcode>(*pc);
      unsigned length = 1;
      switch (opcode) {
        case kExprLoop:
        case kExprIf:
        case kExprBlock:
        case kExprTry:
          length = OpcodeLength(pc);
          depth++;
          break;
        case kExprSetLocal:  // fallthru
        case kExprTeeLocal: {
          LocalIndexOperand operand(this, pc);
          if (assigned->length() > 0 &&
              operand.index < static_cast<uint32_t>(assigned->length())) {
            // Unverified code might have an out-of-bounds index.
            assigned->Add(operand.index);
          }
          length = 1 + operand.length;
          break;
        }
        case kExprEnd:
          depth--;
          break;
        default:
          length = OpcodeLength(pc);
          break;
      }
      if (depth <= 0) break;
      pc += length;
    }
    return ok() ? assigned : nullptr;
  }

  inline wasm::WasmCodePosition position() {
    int offset = static_cast<int>(pc_ - start_);
    DCHECK_EQ(pc_ - start_, offset);  // overflows cannot happen
    return offset;
  }

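  // Pops the operands of a simple unary or binary operator, builds the
  // corresponding node, and pushes the result.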
  inline void BuildSimpleOperator(WasmOpcode opcode, FunctionSig* sig) {
    TFNode* node;
    switch (sig->parameter_count()) {
      case 1: {
        Value val = Pop(0, sig->GetParam(0));
        node = BUILD(Unop, opcode, val.node, position());
        break;
      }
      case 2: {
        Value rval = Pop(1, sig->GetParam(1));
        Value lval = Pop(0, sig->GetParam(0));
        node = BUILD(Binop, opcode, lval.node, rval.node, position());
        break;
      }
      default:
        UNREACHABLE();
        node = nullptr;
        break;
    }
    Push(GetReturnType(sig), node);
  }
};

bool DecodeLocalDecls(AstLocalDecls& decls, const byte* start,
                      const byte* end) {
  AccountingAllocator allocator;
  Zone tmp(&allocator, ZONE_NAME);
  FunctionBody body = {nullptr, nullptr, nullptr, start, end};
  WasmFullDecoder decoder(&tmp, nullptr, body);
  return decoder.DecodeLocalDecls(decls);
}

BytecodeIterator::BytecodeIterator(const byte* start, const byte* end,
                                   AstLocalDecls* decls)
    : Decoder(start, end) {
  if (decls != nullptr) {
    if (DecodeLocalDecls(*decls, start, end)) {
      pc_ += decls->decls_encoded_size;
      if (pc_ > end_) pc_ = end_;
    }
  }
}

DecodeResult VerifyWasmCode(AccountingAllocator* allocator,
                            FunctionBody& body) {
  Zone zone(allocator, ZONE_NAME);
  WasmFullDecoder decoder(&zone, nullptr, body);
  decoder.Decode();
  return decoder.toResult<DecodeStruct*>(nullptr);
}

DecodeResult BuildTFGraph(AccountingAllocator* allocator, TFBuilder* builder,
                          FunctionBody& body) {
  Zone zone(allocator, ZONE_NAME);
  WasmFullDecoder decoder(&zone, builder, body);
  decoder.Decode();
  return decoder.toResult<DecodeStruct*>(nullptr);
}

unsigned OpcodeLength(const byte* pc, const byte* end) {
  WasmDecoder decoder(nullptr, nullptr, pc, end);
  return decoder.OpcodeLength(pc);
}

void PrintAstForDebugging(const byte* start, const byte* end) {
  AccountingAllocator allocator;
  OFStream os(stdout);
  PrintAst(&allocator, FunctionBodyForTesting(start, end), os, nullptr);
}

bool PrintAst(AccountingAllocator* allocator, const FunctionBody& body,
              std::ostream& os,
              std::vector<std::tuple<uint32_t, int, int>>* offset_table) {
  Zone zone(allocator, ZONE_NAME);
  WasmFullDecoder decoder(&zone, nullptr, body);
  int line_nr = 0;

  // Print the function signature.
  if (body.sig) {
    os << "// signature: " << *body.sig << std::endl;
    ++line_nr;
  }

  // Print the local declarations.
  AstLocalDecls decls(&zone);
  BytecodeIterator i(body.start, body.end, &decls);
  if (body.start != i.pc()) {
    os << "// locals: ";
    for (auto p : decls.local_types) {
      LocalType type = p.first;
      uint32_t count = p.second;
      os << " " << count << " " << WasmOpcodes::TypeName(type);
    }
    os << std::endl;
    ++line_nr;

    for (const byte* locals = body.start; locals < i.pc(); locals++) {
      os << (locals == body.start ? "0x" : " 0x") << AsHex(*locals, 2) << ",";
    }
    os << std::endl;
    ++line_nr;
  }

  os << "// body: " << std::endl;
  ++line_nr;
  unsigned control_depth = 0;
  for (; i.has_next(); i.next()) {
    unsigned length = decoder.OpcodeLength(i.pc());

    WasmOpcode opcode = i.current();
    if (opcode == kExprElse) control_depth--;

    int num_whitespaces = control_depth < 32 ? 2 * control_depth : 64;
    if (offset_table) {
      offset_table->push_back(
          std::make_tuple(i.pc_offset(), line_nr, num_whitespaces));
    }

    // 64 spaces of padding.
    const char* padding =
        "                                                                ";
    os.write(padding, num_whitespaces);
    os << "k" << WasmOpcodes::OpcodeName(opcode) << ",";

    for (size_t j = 1; j < length; ++j) {
      os << " " << AsHex(i.pc()[j], 2) << ",";
    }

    switch (opcode) {
      case kExprElse:
        os << "   // @" << i.pc_offset();
        control_depth++;
        break;
      case kExprLoop:
      case kExprIf:
      case kExprBlock:
      case kExprTry: {
        BlockTypeOperand operand(&i, i.pc());
        os << "   // @" << i.pc_offset();
        for (unsigned i = 0; i < operand.arity; i++) {
          os << " " << WasmOpcodes::TypeName(operand.read_entry(i));
        }
        control_depth++;
        break;
      }
      case kExprEnd:
        os << "   // @" << i.pc_offset();
        control_depth--;
        break;
      case kExprBr: {
        BreakDepthOperand operand(&i, i.pc());
        os << "   // depth=" << operand.depth;
        break;
      }
      case kExprBrIf: {
        BreakDepthOperand operand(&i, i.pc());
        os << "   // depth=" << operand.depth;
        break;
      }
      case kExprBrTable: {
        BranchTableOperand operand(&i, i.pc());
        os << " // entries=" << operand.table_count;
        break;
      }
      case kExprCallIndirect: {
        CallIndirectOperand operand(&i, i.pc());
        os << "   // sig #" << operand.index;
        if (decoder.Complete(i.pc(), operand)) {
          os << ": " << *operand.sig;
        }
        break;
      }
      case kExprCallFunction: {
        CallFunctionOperand operand(&i, i.pc());
        os << " // function #" << operand.index;
        if (decoder.Complete(i.pc(), operand)) {
          os << ": " << *operand.sig;
        }
        break;
      }
      default:
        break;
    }
    os << std::endl;
    ++line_nr;
  }

  return decoder.ok();
}

BitVector* AnalyzeLoopAssignmentForTesting(Zone* zone, size_t num_locals,
                                           const byte* start, const byte* end) {
  FunctionBody body = {nullptr, nullptr, nullptr, start, end};
  WasmFullDecoder decoder(zone, nullptr, body);
  return decoder.AnalyzeLoopAssignmentForTesting(start, num_locals);
}

}  // namespace wasm
}  // namespace internal
}  // namespace v8