1 // Copyright 2015 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/interpreter/bytecode-array-writer.h"
6
7 #include "src/api.h"
8 #include "src/interpreter/bytecode-label.h"
9 #include "src/interpreter/bytecode-register.h"
10 #include "src/interpreter/constant-array-builder.h"
11 #include "src/log.h"
12
13 namespace v8 {
14 namespace internal {
15 namespace interpreter {
16
// Out-of-line definition so the class-scope constant can be odr-used;
// its value is supplied at the in-class declaration.
STATIC_CONST_MEMBER_DEFINITION const size_t
    BytecodeArrayWriter::kMaxSizeOfPackedBytecode;
19
// Writer that packs BytecodeNodes into a byte stream, recording source
// positions via |source_position_table_builder_| and pooling wide jump
// targets via |constant_array_builder| (not owned).
BytecodeArrayWriter::BytecodeArrayWriter(
    Zone* zone, ConstantArrayBuilder* constant_array_builder,
    SourcePositionTableBuilder::RecordingMode source_position_mode)
    : bytecodes_(zone),
      unbound_jumps_(0),
      source_position_table_builder_(zone, source_position_mode),
      constant_array_builder_(constant_array_builder) {
  bytecodes_.reserve(512);  // Derived via experimentation.
}
29
30 // override
~BytecodeArrayWriter()31 BytecodeArrayWriter::~BytecodeArrayWriter() {}
32
33 // override
ToBytecodeArray(Isolate * isolate,int register_count,int parameter_count,Handle<FixedArray> handler_table)34 Handle<BytecodeArray> BytecodeArrayWriter::ToBytecodeArray(
35 Isolate* isolate, int register_count, int parameter_count,
36 Handle<FixedArray> handler_table) {
37 DCHECK_EQ(0, unbound_jumps_);
38
39 int bytecode_size = static_cast<int>(bytecodes()->size());
40 int frame_size = register_count * kPointerSize;
41 Handle<FixedArray> constant_pool =
42 constant_array_builder()->ToFixedArray(isolate);
43 Handle<BytecodeArray> bytecode_array = isolate->factory()->NewBytecodeArray(
44 bytecode_size, &bytecodes()->front(), frame_size, parameter_count,
45 constant_pool);
46 bytecode_array->set_handler_table(*handler_table);
47 Handle<ByteArray> source_position_table =
48 source_position_table_builder()->ToSourcePositionTable(
49 isolate, Handle<AbstractCode>::cast(bytecode_array));
50 bytecode_array->set_source_position_table(*source_position_table);
51 return bytecode_array;
52 }
53
// override
// Appends a non-jump bytecode to the stream. Jumps must go through
// WriteJump() so their operands can be patched when the label binds.
void BytecodeArrayWriter::Write(BytecodeNode* node) {
  DCHECK(!Bytecodes::IsJump(node->bytecode()));
  // Record the source position first so it is keyed to the offset at which
  // the bytecode is about to be emitted.
  UpdateSourcePositionTable(node);
  EmitBytecode(node);
}
60
// override
// Appends a jump bytecode targeting |label|; the operand is finalized now
// for bound labels or patched later for forward references.
void BytecodeArrayWriter::WriteJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsJump(node->bytecode()));
  // Source position must be recorded against the jump's emission offset.
  UpdateSourcePositionTable(node);
  EmitJump(node, label);
}
67
// override
// Binds |label| to the current end of the bytecode stream.
void BytecodeArrayWriter::BindLabel(BytecodeLabel* label) {
  size_t current_offset = bytecodes()->size();
  if (label->is_forward_target()) {
    // An earlier jump instruction refers to this label. Update its location.
    PatchJump(current_offset, label->offset());
    // Now treat as if the label will only be back referred to.
  }
  label->bind_to(current_offset);
}
78
// override
// Binds |label| to the same offset as the already-bound |target| label.
void BytecodeArrayWriter::BindLabel(const BytecodeLabel& target,
                                    BytecodeLabel* label) {
  DCHECK(!label->is_bound());
  DCHECK(target.is_bound());
  if (label->is_forward_target()) {
    // An earlier jump instruction refers to this label. Update its location.
    PatchJump(target.offset(), label->offset());
    // Now treat as if the label will only be back referred to.
  }
  label->bind_to(target.offset());
}
91
UpdateSourcePositionTable(const BytecodeNode * const node)92 void BytecodeArrayWriter::UpdateSourcePositionTable(
93 const BytecodeNode* const node) {
94 int bytecode_offset = static_cast<int>(bytecodes()->size());
95 const BytecodeSourceInfo& source_info = node->source_info();
96 if (source_info.is_valid()) {
97 source_position_table_builder()->AddPosition(
98 bytecode_offset, SourcePosition(source_info.source_position()),
99 source_info.is_statement());
100 }
101 }
102
EmitBytecode(const BytecodeNode * const node)103 void BytecodeArrayWriter::EmitBytecode(const BytecodeNode* const node) {
104 DCHECK_NE(node->bytecode(), Bytecode::kIllegal);
105
106 Bytecode bytecode = node->bytecode();
107 OperandScale operand_scale = node->operand_scale();
108
109 if (operand_scale != OperandScale::kSingle) {
110 Bytecode prefix = Bytecodes::OperandScaleToPrefixBytecode(operand_scale);
111 bytecodes()->push_back(Bytecodes::ToByte(prefix));
112 }
113 bytecodes()->push_back(Bytecodes::ToByte(bytecode));
114
115 const uint32_t* const operands = node->operands();
116 const int operand_count = node->operand_count();
117 const OperandSize* operand_sizes =
118 Bytecodes::GetOperandSizes(bytecode, operand_scale);
119 for (int i = 0; i < operand_count; ++i) {
120 switch (operand_sizes[i]) {
121 case OperandSize::kNone:
122 UNREACHABLE();
123 break;
124 case OperandSize::kByte:
125 bytecodes()->push_back(static_cast<uint8_t>(operands[i]));
126 break;
127 case OperandSize::kShort: {
128 uint16_t operand = static_cast<uint16_t>(operands[i]);
129 const uint8_t* raw_operand = reinterpret_cast<const uint8_t*>(&operand);
130 bytecodes()->push_back(raw_operand[0]);
131 bytecodes()->push_back(raw_operand[1]);
132 break;
133 }
134 case OperandSize::kQuad: {
135 const uint8_t* raw_operand =
136 reinterpret_cast<const uint8_t*>(&operands[i]);
137 bytecodes()->push_back(raw_operand[0]);
138 bytecodes()->push_back(raw_operand[1]);
139 bytecodes()->push_back(raw_operand[2]);
140 bytecodes()->push_back(raw_operand[3]);
141 break;
142 }
143 }
144 }
145 }
146
147 // static
GetJumpWithConstantOperand(Bytecode jump_bytecode)148 Bytecode GetJumpWithConstantOperand(Bytecode jump_bytecode) {
149 switch (jump_bytecode) {
150 case Bytecode::kJump:
151 return Bytecode::kJumpConstant;
152 case Bytecode::kJumpIfTrue:
153 return Bytecode::kJumpIfTrueConstant;
154 case Bytecode::kJumpIfFalse:
155 return Bytecode::kJumpIfFalseConstant;
156 case Bytecode::kJumpIfToBooleanTrue:
157 return Bytecode::kJumpIfToBooleanTrueConstant;
158 case Bytecode::kJumpIfToBooleanFalse:
159 return Bytecode::kJumpIfToBooleanFalseConstant;
160 case Bytecode::kJumpIfNotHole:
161 return Bytecode::kJumpIfNotHoleConstant;
162 case Bytecode::kJumpIfNull:
163 return Bytecode::kJumpIfNullConstant;
164 case Bytecode::kJumpIfUndefined:
165 return Bytecode::kJumpIfUndefinedConstant;
166 default:
167 UNREACHABLE();
168 return Bytecode::kIllegal;
169 }
170 }
171
// Patches a forward jump that was emitted with an 8-bit placeholder operand.
// |jump_location| is the offset of the jump bytecode; |delta| is the relative
// target offset to encode. A one-byte constant pool reservation made in
// EmitJump is either discarded (delta fits) or committed (delta pooled).
void BytecodeArrayWriter::PatchJumpWith8BitOperand(size_t jump_location,
                                                   int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  size_t operand_location = jump_location + 1;
  DCHECK_EQ(bytecodes()->at(operand_location), k8BitJumpPlaceholder);
  if (Bytecodes::ScaleForSignedOperand(delta) == OperandScale::kSingle) {
    // The jump fits within the range of an Imm8 operand, so cancel
    // the reservation and jump directly.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kByte);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(delta);
  } else {
    // The jump does not fit within the range of an Imm8 operand, so
    // commit the reservation, putting the offset into the constant pool,
    // and rewrite the jump to its constant-pool variant with the pool
    // index as operand.
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kByte, Smi::FromInt(delta));
    // The reservation guarantees the pool index itself fits in one byte.
    DCHECK_EQ(Bytecodes::SizeForUnsignedOperand(static_cast<uint32_t>(entry)),
              OperandSize::kByte);
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    bytecodes()->at(operand_location) = static_cast<uint8_t>(entry);
  }
}
196
// Patches a forward jump emitted with a 16-bit placeholder operand (two
// k8BitJumpPlaceholder bytes). The two-byte constant pool reservation is
// discarded when |delta| fits an Imm16, otherwise committed and the jump
// rewritten to its constant-pool variant.
void BytecodeArrayWriter::PatchJumpWith16BitOperand(size_t jump_location,
                                                    int delta) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  DCHECK(Bytecodes::IsJumpImmediate(jump_bytecode));
  size_t operand_location = jump_location + 1;
  uint8_t operand_bytes[2];
  if (Bytecodes::ScaleForSignedOperand(delta) <= OperandScale::kDouble) {
    // The jump fits within the range of an Imm16 operand, so cancel
    // the reservation and jump directly.
    constant_array_builder()->DiscardReservedEntry(OperandSize::kShort);
    WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(delta));
  } else {
    // The jump does not fit within the range of an Imm16 operand, so
    // commit the reservation, putting the offset into the constant pool,
    // and update the jump instruction to its constant-pool variant with
    // the pool index as operand.
    size_t entry = constant_array_builder()->CommitReservedEntry(
        OperandSize::kShort, Smi::FromInt(delta));
    jump_bytecode = GetJumpWithConstantOperand(jump_bytecode);
    bytecodes()->at(jump_location) = Bytecodes::ToByte(jump_bytecode);
    WriteUnalignedUInt16(operand_bytes, static_cast<uint16_t>(entry));
  }
  // Both placeholder bytes must still be untouched before being overwritten.
  DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
         bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder);
  bytecodes()->at(operand_location++) = operand_bytes[0];
  bytecodes()->at(operand_location) = operand_bytes[1];
}
223
PatchJumpWith32BitOperand(size_t jump_location,int delta)224 void BytecodeArrayWriter::PatchJumpWith32BitOperand(size_t jump_location,
225 int delta) {
226 DCHECK(Bytecodes::IsJumpImmediate(
227 Bytecodes::FromByte(bytecodes()->at(jump_location))));
228 constant_array_builder()->DiscardReservedEntry(OperandSize::kQuad);
229 uint8_t operand_bytes[4];
230 WriteUnalignedUInt32(operand_bytes, static_cast<uint32_t>(delta));
231 size_t operand_location = jump_location + 1;
232 DCHECK(bytecodes()->at(operand_location) == k8BitJumpPlaceholder &&
233 bytecodes()->at(operand_location + 1) == k8BitJumpPlaceholder &&
234 bytecodes()->at(operand_location + 2) == k8BitJumpPlaceholder &&
235 bytecodes()->at(operand_location + 3) == k8BitJumpPlaceholder);
236 bytecodes()->at(operand_location++) = operand_bytes[0];
237 bytecodes()->at(operand_location++) = operand_bytes[1];
238 bytecodes()->at(operand_location++) = operand_bytes[2];
239 bytecodes()->at(operand_location) = operand_bytes[3];
240 }
241
// Patches the forward jump at |jump_location| so it targets |jump_target|.
// Handles an optional operand-scaling prefix bytecode preceding the jump.
void BytecodeArrayWriter::PatchJump(size_t jump_target, size_t jump_location) {
  Bytecode jump_bytecode = Bytecodes::FromByte(bytecodes()->at(jump_location));
  int delta = static_cast<int>(jump_target - jump_location);
  int prefix_offset = 0;
  OperandScale operand_scale = OperandScale::kSingle;
  if (Bytecodes::IsPrefixScalingBytecode(jump_bytecode)) {
    // If a prefix scaling bytecode is emitted the target offset is one
    // less than the case of no prefix scaling bytecode.
    delta -= 1;
    prefix_offset = 1;
    operand_scale = Bytecodes::PrefixBytecodeToOperandScale(jump_bytecode);
    // The actual jump bytecode follows the prefix.
    jump_bytecode =
        Bytecodes::FromByte(bytecodes()->at(jump_location + prefix_offset));
  }

  DCHECK(Bytecodes::IsJump(jump_bytecode));
  switch (operand_scale) {
    case OperandScale::kSingle:
      // Single scale implies no prefix bytecode, so prefix_offset is zero.
      PatchJumpWith8BitOperand(jump_location, delta);
      break;
    case OperandScale::kDouble:
      PatchJumpWith16BitOperand(jump_location + prefix_offset, delta);
      break;
    case OperandScale::kQuadruple:
      PatchJumpWith32BitOperand(jump_location + prefix_offset, delta);
      break;
    default:
      UNREACHABLE();
  }
  // One fewer forward jump awaiting a bound label.
  unbound_jumps_--;
}
273
// Emits the jump in |node|. Backward jumps (bound label) get their final
// negative delta immediately; forward jumps get a placeholder operand plus
// a constant pool reservation, patched later in PatchJump when the label
// binds.
void BytecodeArrayWriter::EmitJump(BytecodeNode* node, BytecodeLabel* label) {
  DCHECK(Bytecodes::IsJump(node->bytecode()));
  DCHECK_EQ(0u, node->operand(0));

  size_t current_offset = bytecodes()->size();

  if (label->is_bound()) {
    CHECK_GE(current_offset, label->offset());
    CHECK_LE(current_offset, static_cast<size_t>(kMaxInt));
    // Label has been bound already so this is a backwards jump.
    size_t abs_delta = current_offset - label->offset();
    int delta = -static_cast<int>(abs_delta);
    OperandScale operand_scale = Bytecodes::ScaleForSignedOperand(delta);
    if (operand_scale > OperandScale::kSingle) {
      // Adjust for the scaling prefix bytecode that EmitBytecode will
      // insert ahead of the wide jump.
      DCHECK_LE(delta, 0);
      delta -= 1;
    }
    // Only JumpLoop jumps backwards.
    DCHECK_EQ(Bytecode::kJumpLoop, node->bytecode());
    node->set_bytecode(node->bytecode(), delta, node->operand(1));
  } else {
    // The label has not yet been bound so this is a forward reference
    // that will be patched when the label is bound. We create a
    // reservation in the constant pool so the jump can be patched
    // when the label is bound. The reservation means the maximum size
    // of the operand for the constant is known and the jump can
    // be emitted into the bytecode stream with space for the operand.
    unbound_jumps_++;
    label->set_referrer(current_offset);
    OperandSize reserved_operand_size =
        constant_array_builder()->CreateReservedEntry();
    DCHECK_NE(Bytecode::kJumpLoop, node->bytecode());
    // Fill the operand with a placeholder sized to the reservation; the
    // real delta or pool index is written in the PatchJumpWith*BitOperand
    // helpers.
    switch (reserved_operand_size) {
      case OperandSize::kNone:
        UNREACHABLE();
        break;
      case OperandSize::kByte:
        node->set_bytecode(node->bytecode(), k8BitJumpPlaceholder);
        break;
      case OperandSize::kShort:
        node->set_bytecode(node->bytecode(), k16BitJumpPlaceholder);
        break;
      case OperandSize::kQuad:
        node->set_bytecode(node->bytecode(), k32BitJumpPlaceholder);
        break;
    }
  }
  EmitBytecode(node);
}
323
324 } // namespace interpreter
325 } // namespace internal
326 } // namespace v8
327