// Copyright 2015, ARM Limited
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef VIXL_A64_MACRO_ASSEMBLER_A64_H_
#define VIXL_A64_MACRO_ASSEMBLER_A64_H_

#include <algorithm>
#include <limits>

#include "vixl/globals.h"
#include "vixl/a64/assembler-a64.h"
#include "vixl/a64/debugger-a64.h"


#define LS_MACRO_LIST(V)                                      \
  V(Ldrb, Register&, rt, LDRB_w)                              \
  V(Strb, Register&, rt, STRB_w)                              \
  V(Ldrsb, Register&, rt, rt.Is64Bits() ? LDRSB_x : LDRSB_w)  \
  V(Ldrh, Register&, rt, LDRH_w)                              \
  V(Strh, Register&, rt, STRH_w)                              \
  V(Ldrsh, Register&, rt, rt.Is64Bits() ? LDRSH_x : LDRSH_w)  \
  V(Ldr, CPURegister&, rt, LoadOpFor(rt))                     \
  V(Str, CPURegister&, rt, StoreOpFor(rt))                    \
  V(Ldrsw, Register&, rt, LDRSW_x)


#define LSPAIR_MACRO_LIST(V)                              \
  V(Ldp, CPURegister&, rt, rt2, LoadPairOpFor(rt, rt2))   \
  V(Stp, CPURegister&, rt, rt2, StorePairOpFor(rt, rt2))  \
  V(Ldpsw, CPURegister&, rt, rt2, LDPSW_x)

namespace vixl {

// Forward declaration
class MacroAssembler;
class UseScratchRegisterScope;

class Pool {
 public:
  explicit Pool(MacroAssembler* masm)
      : checkpoint_(kNoCheckpointRequired), masm_(masm) {
    Reset();
  }

  void Reset() {
    checkpoint_ = kNoCheckpointRequired;
    monitor_ = 0;
  }

  void Block() { monitor_++; }
  void Release();
  bool IsBlocked() const { return monitor_ != 0; }

  static const ptrdiff_t kNoCheckpointRequired = PTRDIFF_MAX;

  void SetNextCheckpoint(ptrdiff_t checkpoint);
  ptrdiff_t checkpoint() const { return checkpoint_; }

  enum EmitOption {
    kBranchRequired,
    kNoBranchRequired
  };

 protected:
  // Next buffer offset at which a check is required for this pool.
  ptrdiff_t checkpoint_;
  // Indicates whether the emission of this pool is blocked.
  int monitor_;
  // The MacroAssembler using this pool.
  MacroAssembler* masm_;
};
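
// Emission of a pool can be temporarily forbidden, for example around a
// sequence of instructions that must stay contiguous. A minimal sketch,
// assuming `pool` is an object of a class derived from Pool:
//   pool.Block();
//   // ... emit the instructions that the pool must not interrupt ...
//   pool.Release();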


class LiteralPool : public Pool {
 public:
  explicit LiteralPool(MacroAssembler* masm);
  ~LiteralPool();
  void Reset();

  template <typename T>
  RawLiteral* Add(T imm) {
    return AddEntry(new Literal<T>(imm));
  }
  template <typename T>
  RawLiteral* Add(T high64, T low64) {
    return AddEntry(new Literal<T>(high64, low64));
  }
  RawLiteral* AddEntry(RawLiteral* literal);
  bool IsEmpty() const { return entries_.empty(); }
  size_t Size() const;
  size_t MaxSize() const;
  size_t OtherPoolsMaxSize() const;

  void CheckEmitFor(size_t amount, EmitOption option = kBranchRequired);
  void Emit(EmitOption option = kNoBranchRequired);

  void SetNextRecommendedCheckpoint(ptrdiff_t offset);
  ptrdiff_t NextRecommendedCheckpoint();

  // Recommended not exact since the pool can be blocked for short periods.
  static const ptrdiff_t kRecommendedLiteralPoolRange = 128 * KBytes;

 private:
  std::vector<RawLiteral*> entries_;
  size_t size_;
  ptrdiff_t first_use_;
  // The parent class `Pool` provides a `checkpoint_`, which is the buffer
  // offset before which a check *must* occur. This recommended checkpoint
  // indicates when we would like to start emitting the constant pool. The
  // MacroAssembler can, but does not have to, check the buffer when the
  // checkpoint is reached.
  ptrdiff_t recommended_checkpoint_;
};


inline size_t LiteralPool::Size() const {
  // Account for the pool header.
  return size_ + kInstructionSize;
}


inline size_t LiteralPool::MaxSize() const {
  // Account for the potential branch over the pool.
  return Size() + kInstructionSize;
}


inline ptrdiff_t LiteralPool::NextRecommendedCheckpoint() {
  return first_use_ + kRecommendedLiteralPoolRange;
}


class VeneerPool : public Pool {
 public:
  explicit VeneerPool(MacroAssembler* masm) : Pool(masm) {}

  void Reset();

  void Block() { monitor_++; }
  void Release();
  bool IsBlocked() const { return monitor_ != 0; }
  bool IsEmpty() const { return unresolved_branches_.empty(); }

  class BranchInfo {
   public:
    BranchInfo()
        : max_reachable_pc_(0), pc_offset_(0),
          label_(NULL), branch_type_(UnknownBranchType) {}
    BranchInfo(ptrdiff_t offset, Label* label, ImmBranchType branch_type)
        : pc_offset_(offset), label_(label), branch_type_(branch_type) {
      max_reachable_pc_ =
          pc_offset_ + Instruction::ImmBranchForwardRange(branch_type_);
    }

    static bool IsValidComparison(const BranchInfo& branch_1,
                                  const BranchInfo& branch_2) {
      // BranchInfo objects are always compared against other objects with
      // the same branch type.
      if (branch_1.branch_type_ != branch_2.branch_type_) {
        return false;
      }
      // Since we should never have two branch infos with the same offsets, it
      // first looks like we should check that offsets are different. However
      // the operators may also be used to *search* for a branch info in the
      // set.
      bool same_offsets = (branch_1.pc_offset_ == branch_2.pc_offset_);
      return (!same_offsets ||
              ((branch_1.label_ == branch_2.label_) &&
               (branch_1.max_reachable_pc_ == branch_2.max_reachable_pc_)));
    }

    // We must provide comparison operators to work with InvalSet.
    bool operator==(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ == other.pc_offset_;
    }
    bool operator<(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ < other.pc_offset_;
    }
    bool operator<=(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ <= other.pc_offset_;
    }
    bool operator>(const BranchInfo& other) const {
      VIXL_ASSERT(IsValidComparison(*this, other));
      return pc_offset_ > other.pc_offset_;
    }

    // Maximum position reachable by the branch using a positive branch offset.
    ptrdiff_t max_reachable_pc_;
    // Offset of the branch in the code generation buffer.
    ptrdiff_t pc_offset_;
    // The label branched to.
    Label* label_;
    ImmBranchType branch_type_;
  };

  bool BranchTypeUsesVeneers(ImmBranchType type) {
    return (type != UnknownBranchType) && (type != UncondBranchType);
  }

  void RegisterUnresolvedBranch(ptrdiff_t branch_pos,
                                Label* label,
                                ImmBranchType branch_type);
  void DeleteUnresolvedBranchInfoForLabel(Label* label);

  bool ShouldEmitVeneer(int max_reachable_pc, size_t amount);
  bool ShouldEmitVeneers(size_t amount) {
    return ShouldEmitVeneer(unresolved_branches_.FirstLimit(), amount);
  }

  void CheckEmitFor(size_t amount, EmitOption option = kBranchRequired);
  void Emit(EmitOption option, size_t margin);

  // The code size generated for a veneer. Currently one branch instruction.
  // This is for code size checking purposes, and can be extended in the future
  // for example if we decide to add nops between the veneers.
  static const int kVeneerCodeSize = 1 * kInstructionSize;
  // The maximum size of code other than veneers that can be generated when
  // emitting a veneer pool. Currently there can be an additional branch to jump
  // over the pool.
  static const int kPoolNonVeneerCodeSize = 1 * kInstructionSize;

  void UpdateNextCheckPoint() {
    SetNextCheckpoint(NextCheckPoint());
  }

  int NumberOfPotentialVeneers() const {
    return unresolved_branches_.size();
  }

  size_t MaxSize() const {
    return
        kPoolNonVeneerCodeSize + unresolved_branches_.size() * kVeneerCodeSize;
  }

  size_t OtherPoolsMaxSize() const;

  static const int kNPreallocatedInfos = 4;
  static const ptrdiff_t kInvalidOffset = PTRDIFF_MAX;
  static const size_t kReclaimFrom = 128;
  static const size_t kReclaimFactor = 16;

 private:
  typedef InvalSet<BranchInfo,
                   kNPreallocatedInfos,
                   ptrdiff_t,
                   kInvalidOffset,
                   kReclaimFrom,
                   kReclaimFactor> BranchInfoTypedSetBase;
  typedef InvalSetIterator<BranchInfoTypedSetBase> BranchInfoTypedSetIterBase;

  class BranchInfoTypedSet : public BranchInfoTypedSetBase {
   public:
    BranchInfoTypedSet() : BranchInfoTypedSetBase() {}

    ptrdiff_t FirstLimit() {
      if (empty()) {
        return kInvalidOffset;
      }
      return min_element_key();
    }
  };

  class BranchInfoTypedSetIterator : public BranchInfoTypedSetIterBase {
   public:
    BranchInfoTypedSetIterator() : BranchInfoTypedSetIterBase(NULL) {}
    explicit BranchInfoTypedSetIterator(BranchInfoTypedSet* typed_set)
        : BranchInfoTypedSetIterBase(typed_set) {}
  };

  class BranchInfoSet {
   public:
    void insert(BranchInfo branch_info) {
      ImmBranchType type = branch_info.branch_type_;
      VIXL_ASSERT(IsValidBranchType(type));
      typed_set_[BranchIndexFromType(type)].insert(branch_info);
    }

    void erase(BranchInfo branch_info) {
      if (IsValidBranchType(branch_info.branch_type_)) {
        int index =
            BranchInfoSet::BranchIndexFromType(branch_info.branch_type_);
        typed_set_[index].erase(branch_info);
      }
    }

    size_t size() const {
      size_t res = 0;
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        res += typed_set_[i].size();
      }
      return res;
    }

    bool empty() const {
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        if (!typed_set_[i].empty()) {
          return false;
        }
      }
      return true;
    }

    ptrdiff_t FirstLimit() {
      ptrdiff_t res = kInvalidOffset;
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        res = std::min(res, typed_set_[i].FirstLimit());
      }
      return res;
    }

    void Reset() {
      for (int i = 0; i < kNumberOfTrackedBranchTypes; i++) {
        typed_set_[i].clear();
      }
    }

    static ImmBranchType BranchTypeFromIndex(int index) {
      switch (index) {
        case 0:
          return CondBranchType;
        case 1:
          return CompareBranchType;
        case 2:
          return TestBranchType;
        default:
          VIXL_UNREACHABLE();
          return UnknownBranchType;
      }
    }
    static int BranchIndexFromType(ImmBranchType branch_type) {
      switch (branch_type) {
        case CondBranchType:
          return 0;
        case CompareBranchType:
          return 1;
        case TestBranchType:
          return 2;
        default:
          VIXL_UNREACHABLE();
          return 0;
      }
    }

    bool IsValidBranchType(ImmBranchType branch_type) {
      return (branch_type != UnknownBranchType) &&
             (branch_type != UncondBranchType);
    }

   private:
    static const int kNumberOfTrackedBranchTypes = 3;
    BranchInfoTypedSet typed_set_[kNumberOfTrackedBranchTypes];

    friend class VeneerPool;
    friend class BranchInfoSetIterator;
  };

  class BranchInfoSetIterator {
   public:
    explicit BranchInfoSetIterator(BranchInfoSet* set) : set_(set) {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        new(&sub_iterator_[i])
            BranchInfoTypedSetIterator(&(set_->typed_set_[i]));
      }
    }

    VeneerPool::BranchInfo* Current() {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          return sub_iterator_[i].Current();
        }
      }
      VIXL_UNREACHABLE();
      return NULL;
    }

    void Advance() {
      VIXL_ASSERT(!Done());
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          sub_iterator_[i].Advance();
          return;
        }
      }
      VIXL_UNREACHABLE();
    }

    bool Done() const {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) return false;
      }
      return true;
    }

    void AdvanceToNextType() {
      VIXL_ASSERT(!Done());
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          sub_iterator_[i].Finish();
          return;
        }
      }
      VIXL_UNREACHABLE();
    }

    void DeleteCurrentAndAdvance() {
      for (int i = 0; i < BranchInfoSet::kNumberOfTrackedBranchTypes; i++) {
        if (!sub_iterator_[i].Done()) {
          sub_iterator_[i].DeleteCurrentAndAdvance();
          return;
        }
      }
    }

   private:
    BranchInfoSet* set_;
    BranchInfoTypedSetIterator
        sub_iterator_[BranchInfoSet::kNumberOfTrackedBranchTypes];
  };

  ptrdiff_t NextCheckPoint() {
    if (unresolved_branches_.empty()) {
      return kNoCheckpointRequired;
    } else {
      return unresolved_branches_.FirstLimit();
    }
  }

  // Information about unresolved (forward) branches.
  BranchInfoSet unresolved_branches_;
};


// Required InvalSet template specialisations.
template<>
inline ptrdiff_t InvalSet<VeneerPool::BranchInfo,
                          VeneerPool::kNPreallocatedInfos,
                          ptrdiff_t,
                          VeneerPool::kInvalidOffset,
                          VeneerPool::kReclaimFrom,
                          VeneerPool::kReclaimFactor>::Key(
                              const VeneerPool::BranchInfo& branch_info) {
  return branch_info.max_reachable_pc_;
}
template<>
inline void InvalSet<VeneerPool::BranchInfo,
                     VeneerPool::kNPreallocatedInfos,
                     ptrdiff_t,
                     VeneerPool::kInvalidOffset,
                     VeneerPool::kReclaimFrom,
                     VeneerPool::kReclaimFactor>::SetKey(
                         VeneerPool::BranchInfo* branch_info, ptrdiff_t key) {
  branch_info->max_reachable_pc_ = key;
}


// This scope has the following purposes:
//  * Acquire/Release the underlying assembler's code buffer.
//     * This is mandatory before emitting.
//  * Emit the literal pool if necessary before emitting the macro-instruction.
//  * Ensure there is enough space to emit the macro-instruction.
class EmissionCheckScope {
 public:
  EmissionCheckScope(MacroAssembler* masm, size_t size);
  ~EmissionCheckScope();

 protected:
#ifdef VIXL_DEBUG
  MacroAssembler* masm_;
  Label start_;
  size_t size_;
#endif
};
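
// A minimal usage sketch (assuming `masm` is a MacroAssembler): open a scope
// sized for the code about to be emitted, then emit within it.
//   {
//     EmissionCheckScope guard(&masm, 2 * kInstructionSize);
//     // It is now safe to emit up to two instructions without further
//     // pool or buffer checks.
//   }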


// Helper for common Emission checks.
// The macro-instruction maps to a single instruction.
class SingleEmissionCheckScope : public EmissionCheckScope {
 public:
  explicit SingleEmissionCheckScope(MacroAssembler* masm)
      : EmissionCheckScope(masm, kInstructionSize) {}
};


// The macro instruction is a "typical" macro-instruction. Typical macro-
// instructions only emit a few instructions, a few being defined as 8 here.
class MacroEmissionCheckScope : public EmissionCheckScope {
 public:
  explicit MacroEmissionCheckScope(MacroAssembler* masm)
      : EmissionCheckScope(masm, kTypicalMacroInstructionMaxSize) {}

 private:
  static const size_t kTypicalMacroInstructionMaxSize = 8 * kInstructionSize;
};


enum BranchType {
  // Copies of architectural conditions.
  // The associated conditions can be used in place of those, the code will
  // take care of reinterpreting them with the correct type.
  integer_eq = eq,
  integer_ne = ne,
  integer_hs = hs,
  integer_lo = lo,
  integer_mi = mi,
  integer_pl = pl,
  integer_vs = vs,
  integer_vc = vc,
  integer_hi = hi,
  integer_ls = ls,
  integer_ge = ge,
  integer_lt = lt,
  integer_gt = gt,
  integer_le = le,
  integer_al = al,
  integer_nv = nv,

  // These two are *different* from the architectural codes al and nv.
  // 'always' is used to generate unconditional branches.
  // 'never' is used to not generate a branch (generally as the inverse
  // branch type of 'always').
  always, never,
  // cbz and cbnz
  reg_zero, reg_not_zero,
  // tbz and tbnz
  reg_bit_clear, reg_bit_set,

  // Aliases.
  kBranchTypeFirstCondition = eq,
  kBranchTypeLastCondition = nv,
  kBranchTypeFirstUsingReg = reg_zero,
  kBranchTypeFirstUsingBit = reg_bit_clear
};


enum DiscardMoveMode { kDontDiscardForSameWReg, kDiscardForSameWReg };


class MacroAssembler : public Assembler {
 public:
  MacroAssembler(size_t capacity,
                 PositionIndependentCodeOption pic = PositionIndependentCode);
  MacroAssembler(byte * buffer, size_t capacity,
                 PositionIndependentCodeOption pic = PositionIndependentCode);
  ~MacroAssembler();

  // Start generating code from the beginning of the buffer, discarding any code
  // and data that has already been emitted into the buffer.
  //
  // In order to avoid any accidental transfer of state, Reset ASSERTs that the
  // constant pool is not blocked.
  void Reset();

  // Finalize a code buffer of generated instructions. This function must be
  // called before executing or copying code from the buffer.
  void FinalizeCode();


  // Constant generation helpers.
  // These functions return the number of instructions required to move the
  // immediate into the destination register. Also, if the masm pointer is
  // non-null, it generates the code to do so.
  // The two features are implemented using one function to avoid duplication of
  // the logic.
  // The function can be used to evaluate the cost of synthesizing an
  // instruction using 'mov immediate' instructions. A user might prefer loading
  // a constant using the literal pool instead of using multiple 'mov immediate'
  // instructions.
  static int MoveImmediateHelper(MacroAssembler* masm,
                                 const Register &rd,
                                 uint64_t imm);
  static bool OneInstrMoveImmediateHelper(MacroAssembler* masm,
                                          const Register& dst,
                                          int64_t imm);
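
  // For example, the cost of synthesising a constant can be queried without
  // emitting any code by passing a null masm pointer (a sketch based on the
  // description above):
  //   int cost = MacroAssembler::MoveImmediateHelper(NULL, x0, 0x1234567890abcdef);
  //   // `cost` is the number of move-immediate instructions Mov(x0, imm)
  //   // would need; compare it against a literal-pool load before choosing.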


  // Logical macros.
  void And(const Register& rd,
           const Register& rn,
           const Operand& operand);
  void Ands(const Register& rd,
            const Register& rn,
            const Operand& operand);
  void Bic(const Register& rd,
           const Register& rn,
           const Operand& operand);
  void Bics(const Register& rd,
            const Register& rn,
            const Operand& operand);
  void Orr(const Register& rd,
           const Register& rn,
           const Operand& operand);
  void Orn(const Register& rd,
           const Register& rn,
           const Operand& operand);
  void Eor(const Register& rd,
           const Register& rn,
           const Operand& operand);
  void Eon(const Register& rd,
           const Register& rn,
           const Operand& operand);
  void Tst(const Register& rn, const Operand& operand);
  void LogicalMacro(const Register& rd,
                    const Register& rn,
                    const Operand& operand,
                    LogicalOp op);

  // Add and sub macros.
  void Add(const Register& rd,
           const Register& rn,
           const Operand& operand,
           FlagsUpdate S = LeaveFlags);
  void Adds(const Register& rd,
            const Register& rn,
            const Operand& operand);
  void Sub(const Register& rd,
           const Register& rn,
           const Operand& operand,
           FlagsUpdate S = LeaveFlags);
  void Subs(const Register& rd,
            const Register& rn,
            const Operand& operand);
  void Cmn(const Register& rn, const Operand& operand);
  void Cmp(const Register& rn, const Operand& operand);
  void Neg(const Register& rd,
           const Operand& operand);
  void Negs(const Register& rd,
            const Operand& operand);

  void AddSubMacro(const Register& rd,
                   const Register& rn,
                   const Operand& operand,
                   FlagsUpdate S,
                   AddSubOp op);

  // Add/sub with carry macros.
  void Adc(const Register& rd,
           const Register& rn,
           const Operand& operand);
  void Adcs(const Register& rd,
            const Register& rn,
            const Operand& operand);
  void Sbc(const Register& rd,
           const Register& rn,
           const Operand& operand);
  void Sbcs(const Register& rd,
            const Register& rn,
            const Operand& operand);
  void Ngc(const Register& rd,
           const Operand& operand);
  void Ngcs(const Register& rd,
            const Operand& operand);
  void AddSubWithCarryMacro(const Register& rd,
                            const Register& rn,
                            const Operand& operand,
                            FlagsUpdate S,
                            AddSubWithCarryOp op);

  // Move macros.
  void Mov(const Register& rd, uint64_t imm);
  void Mov(const Register& rd,
           const Operand& operand,
           DiscardMoveMode discard_mode = kDontDiscardForSameWReg);
  void Mvn(const Register& rd, uint64_t imm) {
    Mov(rd, (rd.size() == kXRegSize) ? ~imm : (~imm & kWRegMask));
  }
  void Mvn(const Register& rd, const Operand& operand);

  // Try to move an immediate into the destination register in a single
  // instruction. Returns true for success, and updates the contents of dst.
  // Returns false, otherwise.
  bool TryOneInstrMoveImmediate(const Register& dst, int64_t imm);

  // Move an immediate into register dst, and return an Operand object for
  // use with a subsequent instruction that accepts a shift. The value moved
  // into dst is not necessarily equal to imm; it may have had a shifting
  // operation applied to it that will be subsequently undone by the shift
  // applied in the Operand.
  Operand MoveImmediateForShiftedOp(const Register& dst, int64_t imm);
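
  // For example (a sketch; `tmp` stands for a caller-provided scratch
  // register and is not part of this interface):
  //   Operand imm_op = masm.MoveImmediateForShiftedOp(tmp, 0xabcd0000);
  //   masm.Add(x0, x1, imm_op);  // Equivalent to Add(x0, x1, 0xabcd0000).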

  // Synthesises the address represented by a MemOperand into a register.
  void ComputeAddress(const Register& dst, const MemOperand& mem_op);

  // Conditional macros.
  void Ccmp(const Register& rn,
            const Operand& operand,
            StatusFlags nzcv,
            Condition cond);
  void Ccmn(const Register& rn,
            const Operand& operand,
            StatusFlags nzcv,
            Condition cond);
  void ConditionalCompareMacro(const Register& rn,
                               const Operand& operand,
                               StatusFlags nzcv,
                               Condition cond,
                               ConditionalCompareOp op);
  void Csel(const Register& rd,
            const Register& rn,
            const Operand& operand,
            Condition cond);

  // Load/store macros.
#define DECLARE_FUNCTION(FN, REGTYPE, REG, OP) \
  void FN(const REGTYPE REG, const MemOperand& addr);
  LS_MACRO_LIST(DECLARE_FUNCTION)
#undef DECLARE_FUNCTION

  void LoadStoreMacro(const CPURegister& rt,
                      const MemOperand& addr,
                      LoadStoreOp op);

#define DECLARE_FUNCTION(FN, REGTYPE, REG, REG2, OP) \
  void FN(const REGTYPE REG, const REGTYPE REG2, const MemOperand& addr);
  LSPAIR_MACRO_LIST(DECLARE_FUNCTION)
#undef DECLARE_FUNCTION

  void LoadStorePairMacro(const CPURegister& rt,
                          const CPURegister& rt2,
                          const MemOperand& addr,
                          LoadStorePairOp op);

  void Prfm(PrefetchOperation op, const MemOperand& addr);

  // Push or pop up to 4 registers of the same width to or from the stack,
  // using the current stack pointer as set by SetStackPointer.
  //
  // If an argument register is 'NoReg', all further arguments are also assumed
  // to be 'NoReg', and are thus not pushed or popped.
  //
  // Arguments are ordered such that "Push(a, b);" is functionally equivalent
  // to "Push(a); Push(b);".
  //
  // It is valid to push the same register more than once, and there is no
  // restriction on the order in which registers are specified.
  //
  // It is not valid to pop into the same register more than once in one
  // operation, not even into the zero register.
  //
  // If the current stack pointer (as set by SetStackPointer) is sp, then it
  // must be aligned to 16 bytes on entry and the total size of the specified
  // registers must also be a multiple of 16 bytes.
  //
  // Even if the current stack pointer is not the system stack pointer (sp),
  // Push (and derived methods) will still modify the system stack pointer in
  // order to comply with ABI rules about accessing memory below the system
  // stack pointer.
  //
  // Other than the registers passed into Pop, the stack pointer and (possibly)
  // the system stack pointer, these methods do not modify any other registers.
  void Push(const CPURegister& src0, const CPURegister& src1 = NoReg,
            const CPURegister& src2 = NoReg, const CPURegister& src3 = NoReg);
  void Pop(const CPURegister& dst0, const CPURegister& dst1 = NoReg,
           const CPURegister& dst2 = NoReg, const CPURegister& dst3 = NoReg);
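
  // For example (a minimal sketch; keeps sp 16-byte aligned):
  //   masm.Push(x1, x0);  // Functionally equivalent to Push(x1); Push(x0);
  //   masm.Pop(x0, x1);   // Restores both values in the matching order.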

  // Alternative forms of Push and Pop, taking a RegList or CPURegList that
  // specifies the registers that are to be pushed or popped. Higher-numbered
  // registers are associated with higher memory addresses (as in the A32 push
  // and pop instructions).
  //
  // (Push|Pop)SizeRegList allow you to specify the register size as a
  // parameter. Only kXRegSize, kWRegSize, kDRegSize and kSRegSize are
  // supported.
  //
  // Otherwise, (Push|Pop)(CPU|X|W|D|S)RegList is preferred.
  void PushCPURegList(CPURegList registers);
  void PopCPURegList(CPURegList registers);

  void PushSizeRegList(RegList registers, unsigned reg_size,
      CPURegister::RegisterType type = CPURegister::kRegister) {
    PushCPURegList(CPURegList(type, reg_size, registers));
  }
  void PopSizeRegList(RegList registers, unsigned reg_size,
      CPURegister::RegisterType type = CPURegister::kRegister) {
    PopCPURegList(CPURegList(type, reg_size, registers));
  }
  void PushXRegList(RegList regs) {
    PushSizeRegList(regs, kXRegSize);
  }
  void PopXRegList(RegList regs) {
    PopSizeRegList(regs, kXRegSize);
  }
  void PushWRegList(RegList regs) {
    PushSizeRegList(regs, kWRegSize);
  }
  void PopWRegList(RegList regs) {
    PopSizeRegList(regs, kWRegSize);
  }
  void PushDRegList(RegList regs) {
    PushSizeRegList(regs, kDRegSize, CPURegister::kVRegister);
  }
  void PopDRegList(RegList regs) {
    PopSizeRegList(regs, kDRegSize, CPURegister::kVRegister);
  }
  void PushSRegList(RegList regs) {
    PushSizeRegList(regs, kSRegSize, CPURegister::kVRegister);
  }
  void PopSRegList(RegList regs) {
    PopSizeRegList(regs, kSRegSize, CPURegister::kVRegister);
  }
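
  // For example (a sketch, assuming CPURegister::Bit() is used to build the
  // RegList):
  //   masm.PushXRegList(x19.Bit() | x20.Bit());  // x20 gets the higher address.
  //   masm.PopXRegList(x19.Bit() | x20.Bit());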

  // Push the specified register 'count' times.
  void PushMultipleTimes(int count, Register src);

  // Poke 'src' onto the stack. The offset is in bytes.
  //
  // If the current stack pointer (as set by SetStackPointer) is sp, then sp
  // must be aligned to 16 bytes.
  void Poke(const Register& src, const Operand& offset);

  // Peek at a value on the stack, and put it in 'dst'. The offset is in bytes.
  //
  // If the current stack pointer (as set by SetStackPointer) is sp, then sp
  // must be aligned to 16 bytes.
  void Peek(const Register& dst, const Operand& offset);
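
  // For example (a minimal sketch):
  //   masm.Poke(x0, 8);  // Store x0 at [StackPointer() + 8].
  //   masm.Peek(x1, 8);  // Load the same value back into x1.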

  // Alternative forms of Peek and Poke, taking a RegList or CPURegList that
  // specifies the registers that are to be pushed or popped. Higher-numbered
  // registers are associated with higher memory addresses.
  //
  // (Peek|Poke)SizeRegList allow you to specify the register size as a
  // parameter. Only kXRegSize, kWRegSize, kDRegSize and kSRegSize are
  // supported.
  //
  // Otherwise, (Peek|Poke)(CPU|X|W|D|S)RegList is preferred.
  void PeekCPURegList(CPURegList registers, int64_t offset) {
    LoadCPURegList(registers, MemOperand(StackPointer(), offset));
  }
  void PokeCPURegList(CPURegList registers, int64_t offset) {
    StoreCPURegList(registers, MemOperand(StackPointer(), offset));
  }

  void PeekSizeRegList(RegList registers, int64_t offset, unsigned reg_size,
      CPURegister::RegisterType type = CPURegister::kRegister) {
    PeekCPURegList(CPURegList(type, reg_size, registers), offset);
  }
  void PokeSizeRegList(RegList registers, int64_t offset, unsigned reg_size,
      CPURegister::RegisterType type = CPURegister::kRegister) {
    PokeCPURegList(CPURegList(type, reg_size, registers), offset);
  }
  void PeekXRegList(RegList regs, int64_t offset) {
    PeekSizeRegList(regs, offset, kXRegSize);
  }
  void PokeXRegList(RegList regs, int64_t offset) {
    PokeSizeRegList(regs, offset, kXRegSize);
  }
  void PeekWRegList(RegList regs, int64_t offset) {
    PeekSizeRegList(regs, offset, kWRegSize);
  }
  void PokeWRegList(RegList regs, int64_t offset) {
    PokeSizeRegList(regs, offset, kWRegSize);
  }
  void PeekDRegList(RegList regs, int64_t offset) {
    PeekSizeRegList(regs, offset, kDRegSize, CPURegister::kVRegister);
  }
  void PokeDRegList(RegList regs, int64_t offset) {
    PokeSizeRegList(regs, offset, kDRegSize, CPURegister::kVRegister);
  }
  void PeekSRegList(RegList regs, int64_t offset) {
    PeekSizeRegList(regs, offset, kSRegSize, CPURegister::kVRegister);
  }
  void PokeSRegList(RegList regs, int64_t offset) {
    PokeSizeRegList(regs, offset, kSRegSize, CPURegister::kVRegister);
  }


  // Claim or drop stack space without actually accessing memory.
  //
  // If the current stack pointer (as set by SetStackPointer) is sp, then it
  // must be aligned to 16 bytes and the size claimed or dropped must be a
  // multiple of 16 bytes.
  void Claim(const Operand& size);
  void Drop(const Operand& size);
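
  // For example (a minimal sketch):
  //   masm.Claim(32);  // Reserve 32 bytes of stack space.
  //   // ... use the space via Poke/Peek ...
  //   masm.Drop(32);   // Release the 32 bytes again.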

  // Preserve the callee-saved registers (as defined by AAPCS64).
  //
  // Higher-numbered registers are pushed before lower-numbered registers, and
  // thus get higher addresses.
  // Floating-point registers are pushed before general-purpose registers, and
  // thus get higher addresses.
  //
  // This method must not be called unless StackPointer() is sp, and it is
  // aligned to 16 bytes.
  void PushCalleeSavedRegisters();

  // Restore the callee-saved registers (as defined by AAPCS64).
  //
  // Higher-numbered registers are popped after lower-numbered registers, and
  // thus come from higher addresses.
  // Floating-point registers are popped after general-purpose registers, and
  // thus come from higher addresses.
  //
  // This method must not be called unless StackPointer() is sp, and it is
  // aligned to 16 bytes.
  void PopCalleeSavedRegisters();
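
  // A typical prologue/epilogue pairing (a sketch; assumes StackPointer() is
  // sp and the generated function returns with Ret()):
  //   masm.PushCalleeSavedRegisters();
  //   // ... function body ...
  //   masm.PopCalleeSavedRegisters();
  //   masm.Ret();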

  void LoadCPURegList(CPURegList registers, const MemOperand& src);
  void StoreCPURegList(CPURegList registers, const MemOperand& dst);

  // Remaining instructions are simple pass-through calls to the assembler.
  void Adr(const Register& rd, Label* label) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    adr(rd, label);
  }
  void Adrp(const Register& rd, Label* label) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    adrp(rd, label);
  }
  void Asr(const Register& rd, const Register& rn, unsigned shift) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    asr(rd, rn, shift);
  }
  void Asr(const Register& rd, const Register& rn, const Register& rm) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    asrv(rd, rn, rm);
  }

  // Branch type inversion relies on these relations.
  VIXL_STATIC_ASSERT((reg_zero      == (reg_not_zero ^ 1)) &&
                     (reg_bit_clear == (reg_bit_set ^ 1)) &&
                     (always        == (never ^ 1)));

  BranchType InvertBranchType(BranchType type) {
    if (kBranchTypeFirstCondition <= type && type <= kBranchTypeLastCondition) {
      return static_cast<BranchType>(
          InvertCondition(static_cast<Condition>(type)));
    } else {
      return static_cast<BranchType>(type ^ 1);
    }
  }
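
  // For example, InvertBranchType(reg_zero) is reg_not_zero, and
  // InvertBranchType(integer_eq) is integer_ne.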

  void B(Label* label, BranchType type, Register reg = NoReg, int bit = -1);
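
  // For example (a sketch; `done` is a Label bound elsewhere):
  //   masm.B(&done, reg_zero, x0);        // Branch to `done` if x0 is zero (cbz).
  //   masm.B(&done, reg_bit_set, x1, 3);  // Branch if bit 3 of x1 is set (tbnz).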

  void B(Label* label);
  void B(Label* label, Condition cond);
  void B(Condition cond, Label* label) {
    B(label, cond);
  }
  void Bfm(const Register& rd,
           const Register& rn,
           unsigned immr,
           unsigned imms) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    bfm(rd, rn, immr, imms);
  }
  void Bfi(const Register& rd,
           const Register& rn,
           unsigned lsb,
           unsigned width) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    bfi(rd, rn, lsb, width);
  }
  void Bfxil(const Register& rd,
             const Register& rn,
             unsigned lsb,
             unsigned width) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    bfxil(rd, rn, lsb, width);
  }
  void Bind(Label* label);
  // Bind a label to a specified offset from the start of the buffer.
  void BindToOffset(Label* label, ptrdiff_t offset);
  void Bl(Label* label) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    bl(label);
  }
  void Blr(const Register& xn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!xn.IsZero());
    SingleEmissionCheckScope guard(this);
    blr(xn);
  }
  void Br(const Register& xn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!xn.IsZero());
    SingleEmissionCheckScope guard(this);
    br(xn);
  }
  void Brk(int code = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    brk(code);
  }
  void Cbnz(const Register& rt, Label* label);
  void Cbz(const Register& rt, Label* label);
  void Cinc(const Register& rd, const Register& rn, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    cinc(rd, rn, cond);
  }
  void Cinv(const Register& rd, const Register& rn, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    cinv(rd, rn, cond);
  }
  void Clrex() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    clrex();
  }
  void Cls(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    cls(rd, rn);
  }
  void Clz(const Register& rd, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    clz(rd, rn);
  }
  void Cneg(const Register& rd, const Register& rn, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    cneg(rd, rn, cond);
  }
  void Cset(const Register& rd, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    cset(rd, cond);
  }
  void Csetm(const Register& rd, Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    csetm(rd, cond);
  }
  void Csinc(const Register& rd,
             const Register& rn,
             const Register& rm,
             Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    csinc(rd, rn, rm, cond);
  }
  void Csinv(const Register& rd,
             const Register& rn,
             const Register& rm,
             Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    csinv(rd, rn, rm, cond);
  }
  void Csneg(const Register& rd,
             const Register& rn,
             const Register& rm,
             Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    csneg(rd, rn, rm, cond);
  }
  void Dmb(BarrierDomain domain, BarrierType type) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    dmb(domain, type);
  }
  void Dsb(BarrierDomain domain, BarrierType type) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    dsb(domain, type);
  }
  void Extr(const Register& rd,
            const Register& rn,
            const Register& rm,
            unsigned lsb) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    VIXL_ASSERT(!rn.IsZero());
    VIXL_ASSERT(!rm.IsZero());
    SingleEmissionCheckScope guard(this);
    extr(rd, rn, rm, lsb);
  }
  void Fadd(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fadd(vd, vn, vm);
  }
  void Fccmp(const VRegister& vn,
             const VRegister& vm,
             StatusFlags nzcv,
             Condition cond,
             FPTrapFlags trap = DisableTrap) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    FPCCompareMacro(vn, vm, nzcv, cond, trap);
  }
  void Fccmpe(const VRegister& vn,
              const VRegister& vm,
              StatusFlags nzcv,
              Condition cond) {
    Fccmp(vn, vm, nzcv, cond, EnableTrap);
  }
  void Fcmp(const VRegister& vn, const VRegister& vm,
            FPTrapFlags trap = DisableTrap) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    FPCompareMacro(vn, vm, trap);
  }
  void Fcmp(const VRegister& vn, double value,
            FPTrapFlags trap = DisableTrap);
  void Fcmpe(const VRegister& vn, double value);
  void Fcmpe(const VRegister& vn, const VRegister& vm) {
    Fcmp(vn, vm, EnableTrap);
  }
  void Fcsel(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             Condition cond) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT((cond != al) && (cond != nv));
    SingleEmissionCheckScope guard(this);
    fcsel(vd, vn, vm, cond);
  }
  void Fcvt(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvt(vd, vn);
  }
  void Fcvtl(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtl(vd, vn);
  }
  void Fcvtl2(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtl2(vd, vn);
  }
  void Fcvtn(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtn(vd, vn);
  }
  void Fcvtn2(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtn2(vd, vn);
  }
  void Fcvtxn(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtxn(vd, vn);
  }
  void Fcvtxn2(const VRegister& vd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fcvtxn2(vd, vn);
  }
  void Fcvtas(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtas(rd, vn);
  }
  void Fcvtau(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtau(rd, vn);
  }
  void Fcvtms(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtms(rd, vn);
  }
  void Fcvtmu(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtmu(rd, vn);
  }
  void Fcvtns(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtns(rd, vn);
  }
  void Fcvtnu(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtnu(rd, vn);
  }
  void Fcvtps(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtps(rd, vn);
  }
  void Fcvtpu(const Register& rd, const VRegister& vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtpu(rd, vn);
  }
  void Fcvtzs(const Register& rd, const VRegister& vn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtzs(rd, vn, fbits);
  }
  void Fcvtzu(const Register& rd, const VRegister& vn, int fbits = 0) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fcvtzu(rd, vn, fbits);
  }
  void Fdiv(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fdiv(vd, vn, vm);
  }
  void Fmax(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmax(vd, vn, vm);
  }
  void Fmaxnm(const VRegister& vd,
              const VRegister& vn,
              const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmaxnm(vd, vn, vm);
  }
  void Fmin(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmin(vd, vn, vm);
  }
  void Fminnm(const VRegister& vd,
              const VRegister& vn,
              const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fminnm(vd, vn, vm);
  }
  void Fmov(VRegister vd, VRegister vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    // Only skip the fmov if vd and vn are the same D register. fmov(s0, s0)
    // is not a no-op because it clears the top word of d0. Technically,
    // fmov(d0, d0) is not a no-op either because it clears the top of q0, but
    // VRegister does not currently support Q registers.
    if (!vd.Is(vn) || !vd.Is64Bits()) {
      fmov(vd, vn);
    }
  }
  void Fmov(VRegister vd, Register rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rn.IsZero());
    SingleEmissionCheckScope guard(this);
    fmov(vd, rn);
  }
  void Fmov(const VRegister& vd, int index, const Register& rn) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmov(vd, index, rn);
  }
  void Fmov(const Register& rd, const VRegister& vn, int index) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmov(rd, vn, index);
  }

  // Provide explicit double and float interfaces for FP immediate moves, rather
  // than relying on implicit C++ casts. This allows signalling NaNs to be
  // preserved when the immediate matches the format of vd. Most systems convert
  // signalling NaNs to quiet NaNs when converting between float and double.
  void Fmov(VRegister vd, double imm);
  void Fmov(VRegister vd, float imm);
  // Provide a template to allow other types to be converted automatically.
  template<typename T>
  void Fmov(VRegister vd, T imm) {
    VIXL_ASSERT(allow_macro_instructions_);
    Fmov(vd, static_cast<double>(imm));
  }
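
  // For example:
  //   masm.Fmov(d0, 1.0);   // Double-precision immediate, matching d0.
  //   masm.Fmov(s0, 1.0f);  // Single-precision immediate, matching s0; no
  //                         // implicit float-to-double conversion is involved.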
  void Fmov(Register rd, VRegister vn) {
    VIXL_ASSERT(allow_macro_instructions_);
    VIXL_ASSERT(!rd.IsZero());
    SingleEmissionCheckScope guard(this);
    fmov(rd, vn);
  }
  void Fmul(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmul(vd, vn, vm);
  }
  void Fnmul(const VRegister& vd, const VRegister& vn,
             const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fnmul(vd, vn, vm);
  }
  void Fmadd(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmadd(vd, vn, vm, va);
  }
  void Fmsub(const VRegister& vd,
             const VRegister& vn,
             const VRegister& vm,
             const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fmsub(vd, vn, vm, va);
  }
  void Fnmadd(const VRegister& vd,
              const VRegister& vn,
              const VRegister& vm,
              const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fnmadd(vd, vn, vm, va);
  }
  void Fnmsub(const VRegister& vd,
              const VRegister& vn,
              const VRegister& vm,
              const VRegister& va) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fnmsub(vd, vn, vm, va);
  }
  void Fsub(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    fsub(vd, vn, vm);
  }
  void Hint(SystemHint code) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    hint(code);
  }
  void Hlt(int code) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    hlt(code);
  }
  void Isb() {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    isb();
  }
  void Ldar(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldar(rt, src);
  }
  void Ldarb(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldarb(rt, src);
  }
  void Ldarh(const Register& rt, const MemOperand& src) {
    VIXL_ASSERT(allow_macro_instructions_);
    SingleEmissionCheckScope guard(this);
    ldarh(rt, src);
  }
  void Ldaxp(const Register& rt, const Register& rt2, const MemOperand& src) {
1432     VIXL_ASSERT(allow_macro_instructions_);
1433     VIXL_ASSERT(!rt.Aliases(rt2));
1434     SingleEmissionCheckScope guard(this);
1435     ldaxp(rt, rt2, src);
1436   }
Ldaxr(const Register & rt,const MemOperand & src)1437   void Ldaxr(const Register& rt, const MemOperand& src) {
1438     VIXL_ASSERT(allow_macro_instructions_);
1439     SingleEmissionCheckScope guard(this);
1440     ldaxr(rt, src);
1441   }
Ldaxrb(const Register & rt,const MemOperand & src)1442   void Ldaxrb(const Register& rt, const MemOperand& src) {
1443     VIXL_ASSERT(allow_macro_instructions_);
1444     SingleEmissionCheckScope guard(this);
1445     ldaxrb(rt, src);
1446   }
Ldaxrh(const Register & rt,const MemOperand & src)1447   void Ldaxrh(const Register& rt, const MemOperand& src) {
1448     VIXL_ASSERT(allow_macro_instructions_);
1449     SingleEmissionCheckScope guard(this);
1450     ldaxrh(rt, src);
1451   }
Ldnp(const CPURegister & rt,const CPURegister & rt2,const MemOperand & src)1452   void Ldnp(const CPURegister& rt,
1453             const CPURegister& rt2,
1454             const MemOperand& src) {
1455     VIXL_ASSERT(allow_macro_instructions_);
1456     SingleEmissionCheckScope guard(this);
1457     ldnp(rt, rt2, src);
1458   }
  // Provide both double and float interfaces for FP immediate loads, rather
  // than relying on implicit C++ casts. This allows signalling NaNs to be
  // preserved when the immediate matches the format of vt. Most systems
  // convert signalling NaNs to quiet NaNs when converting between float and
  // double.
Ldr(const VRegister & vt,double imm)1463   void Ldr(const VRegister& vt, double imm) {
1464     VIXL_ASSERT(allow_macro_instructions_);
1465     SingleEmissionCheckScope guard(this);
1466     RawLiteral* literal;
1467     if (vt.IsD()) {
1468       literal = literal_pool_.Add(imm);
1469     } else {
1470       literal = literal_pool_.Add(static_cast<float>(imm));
1471     }
1472     ldr(vt, literal);
1473   }
Ldr(const VRegister & vt,float imm)1474   void Ldr(const VRegister& vt, float imm) {
1475     VIXL_ASSERT(allow_macro_instructions_);
1476     SingleEmissionCheckScope guard(this);
1477     RawLiteral* literal;
1478     if (vt.IsS()) {
1479       literal = literal_pool_.Add(imm);
1480     } else {
1481       literal = literal_pool_.Add(static_cast<double>(imm));
1482     }
1483     ldr(vt, literal);
1484   }
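  // Literal-pool sketch for the two FP overloads above (illustrative only):
  //   masm.Ldr(d0, 1.0);   // Adds a double literal to the pool.
  //   masm.Ldr(s0, 1.0f);  // Adds a float literal to the pool.
  //   masm.Ldr(s0, 1.0);   // vt is an S register, so the double is narrowed
  //                        // to float before being pooled (see the cast
  //                        // above).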
Ldr(const VRegister & vt,uint64_t high64,uint64_t low64)1485   void Ldr(const VRegister& vt, uint64_t high64, uint64_t low64) {
1486     VIXL_ASSERT(allow_macro_instructions_);
1487     VIXL_ASSERT(vt.IsQ());
1488     SingleEmissionCheckScope guard(this);
1489     ldr(vt, literal_pool_.Add(high64, low64));
1490   }
Ldr(const Register & rt,uint64_t imm)1491   void Ldr(const Register& rt, uint64_t imm) {
1492     VIXL_ASSERT(allow_macro_instructions_);
1493     VIXL_ASSERT(!rt.IsZero());
1494     SingleEmissionCheckScope guard(this);
1495     RawLiteral* literal;
1496     if (rt.Is64Bits()) {
1497       literal = literal_pool_.Add(imm);
1498     } else {
1499       VIXL_ASSERT(rt.Is32Bits());
1500       VIXL_ASSERT(is_uint32(imm) || is_int32(imm));
1501       literal = literal_pool_.Add(static_cast<uint32_t>(imm));
1502     }
1503     ldr(rt, literal);
1504   }
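  // For a W register the pooled immediate must fit in 32 bits, as asserted
  // above (illustrative values):
  //   masm.Ldr(x0, UINT64_C(0x123456789a));  // 64-bit literal.
  //   masm.Ldr(w0, 0x12345678);              // 32-bit literal.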
Ldrsw(const Register & rt,uint32_t imm)1505   void Ldrsw(const Register& rt, uint32_t imm) {
1506     VIXL_ASSERT(allow_macro_instructions_);
1507     VIXL_ASSERT(!rt.IsZero());
1508     SingleEmissionCheckScope guard(this);
1509     RawLiteral* literal = literal_pool_.Add(imm);
1510     ldrsw(rt, literal);
1511   }
Ldxp(const Register & rt,const Register & rt2,const MemOperand & src)1512   void Ldxp(const Register& rt, const Register& rt2, const MemOperand& src) {
1513     VIXL_ASSERT(allow_macro_instructions_);
1514     VIXL_ASSERT(!rt.Aliases(rt2));
1515     SingleEmissionCheckScope guard(this);
1516     ldxp(rt, rt2, src);
1517   }
Ldxr(const Register & rt,const MemOperand & src)1518   void Ldxr(const Register& rt, const MemOperand& src) {
1519     VIXL_ASSERT(allow_macro_instructions_);
1520     SingleEmissionCheckScope guard(this);
1521     ldxr(rt, src);
1522   }
Ldxrb(const Register & rt,const MemOperand & src)1523   void Ldxrb(const Register& rt, const MemOperand& src) {
1524     VIXL_ASSERT(allow_macro_instructions_);
1525     SingleEmissionCheckScope guard(this);
1526     ldxrb(rt, src);
1527   }
Ldxrh(const Register & rt,const MemOperand & src)1528   void Ldxrh(const Register& rt, const MemOperand& src) {
1529     VIXL_ASSERT(allow_macro_instructions_);
1530     SingleEmissionCheckScope guard(this);
1531     ldxrh(rt, src);
1532   }
Lsl(const Register & rd,const Register & rn,unsigned shift)1533   void Lsl(const Register& rd, const Register& rn, unsigned shift) {
1534     VIXL_ASSERT(allow_macro_instructions_);
1535     VIXL_ASSERT(!rd.IsZero());
1536     VIXL_ASSERT(!rn.IsZero());
1537     SingleEmissionCheckScope guard(this);
1538     lsl(rd, rn, shift);
1539   }
Lsl(const Register & rd,const Register & rn,const Register & rm)1540   void Lsl(const Register& rd, const Register& rn, const Register& rm) {
1541     VIXL_ASSERT(allow_macro_instructions_);
1542     VIXL_ASSERT(!rd.IsZero());
1543     VIXL_ASSERT(!rn.IsZero());
1544     VIXL_ASSERT(!rm.IsZero());
1545     SingleEmissionCheckScope guard(this);
1546     lslv(rd, rn, rm);
1547   }
Lsr(const Register & rd,const Register & rn,unsigned shift)1548   void Lsr(const Register& rd, const Register& rn, unsigned shift) {
1549     VIXL_ASSERT(allow_macro_instructions_);
1550     VIXL_ASSERT(!rd.IsZero());
1551     VIXL_ASSERT(!rn.IsZero());
1552     SingleEmissionCheckScope guard(this);
1553     lsr(rd, rn, shift);
1554   }
Lsr(const Register & rd,const Register & rn,const Register & rm)1555   void Lsr(const Register& rd, const Register& rn, const Register& rm) {
1556     VIXL_ASSERT(allow_macro_instructions_);
1557     VIXL_ASSERT(!rd.IsZero());
1558     VIXL_ASSERT(!rn.IsZero());
1559     VIXL_ASSERT(!rm.IsZero());
1560     SingleEmissionCheckScope guard(this);
1561     lsrv(rd, rn, rm);
1562   }
Madd(const Register & rd,const Register & rn,const Register & rm,const Register & ra)1563   void Madd(const Register& rd,
1564             const Register& rn,
1565             const Register& rm,
1566             const Register& ra) {
1567     VIXL_ASSERT(allow_macro_instructions_);
1568     VIXL_ASSERT(!rd.IsZero());
1569     VIXL_ASSERT(!rn.IsZero());
1570     VIXL_ASSERT(!rm.IsZero());
1571     VIXL_ASSERT(!ra.IsZero());
1572     SingleEmissionCheckScope guard(this);
1573     madd(rd, rn, rm, ra);
1574   }
Mneg(const Register & rd,const Register & rn,const Register & rm)1575   void Mneg(const Register& rd, const Register& rn, const Register& rm) {
1576     VIXL_ASSERT(allow_macro_instructions_);
1577     VIXL_ASSERT(!rd.IsZero());
1578     VIXL_ASSERT(!rn.IsZero());
1579     VIXL_ASSERT(!rm.IsZero());
1580     SingleEmissionCheckScope guard(this);
1581     mneg(rd, rn, rm);
1582   }
Mov(const Register & rd,const Register & rn)1583   void Mov(const Register& rd, const Register& rn) {
1584     VIXL_ASSERT(allow_macro_instructions_);
1585     SingleEmissionCheckScope guard(this);
1586     mov(rd, rn);
1587   }
1588   void Movk(const Register& rd, uint64_t imm, int shift = -1) {
1589     VIXL_ASSERT(allow_macro_instructions_);
1590     VIXL_ASSERT(!rd.IsZero());
1591     SingleEmissionCheckScope guard(this);
1592     movk(rd, imm, shift);
1593   }
Mrs(const Register & rt,SystemRegister sysreg)1594   void Mrs(const Register& rt, SystemRegister sysreg) {
1595     VIXL_ASSERT(allow_macro_instructions_);
1596     VIXL_ASSERT(!rt.IsZero());
1597     SingleEmissionCheckScope guard(this);
1598     mrs(rt, sysreg);
1599   }
Msr(SystemRegister sysreg,const Register & rt)1600   void Msr(SystemRegister sysreg, const Register& rt) {
1601     VIXL_ASSERT(allow_macro_instructions_);
1602     VIXL_ASSERT(!rt.IsZero());
1603     SingleEmissionCheckScope guard(this);
1604     msr(sysreg, rt);
1605   }
1606   void Sys(int op1, int crn, int crm, int op2, const Register& rt = xzr) {
1607     VIXL_ASSERT(allow_macro_instructions_);
1608     SingleEmissionCheckScope guard(this);
1609     sys(op1, crn, crm, op2, rt);
1610   }
Dc(DataCacheOp op,const Register & rt)1611   void Dc(DataCacheOp op, const Register& rt) {
1612     VIXL_ASSERT(allow_macro_instructions_);
1613     SingleEmissionCheckScope guard(this);
1614     dc(op, rt);
1615   }
Ic(InstructionCacheOp op,const Register & rt)1616   void Ic(InstructionCacheOp op, const Register& rt) {
1617     VIXL_ASSERT(allow_macro_instructions_);
1618     SingleEmissionCheckScope guard(this);
1619     ic(op, rt);
1620   }
Msub(const Register & rd,const Register & rn,const Register & rm,const Register & ra)1621   void Msub(const Register& rd,
1622             const Register& rn,
1623             const Register& rm,
1624             const Register& ra) {
1625     VIXL_ASSERT(allow_macro_instructions_);
1626     VIXL_ASSERT(!rd.IsZero());
1627     VIXL_ASSERT(!rn.IsZero());
1628     VIXL_ASSERT(!rm.IsZero());
1629     VIXL_ASSERT(!ra.IsZero());
1630     SingleEmissionCheckScope guard(this);
1631     msub(rd, rn, rm, ra);
1632   }
Mul(const Register & rd,const Register & rn,const Register & rm)1633   void Mul(const Register& rd, const Register& rn, const Register& rm) {
1634     VIXL_ASSERT(allow_macro_instructions_);
1635     VIXL_ASSERT(!rd.IsZero());
1636     VIXL_ASSERT(!rn.IsZero());
1637     VIXL_ASSERT(!rm.IsZero());
1638     SingleEmissionCheckScope guard(this);
1639     mul(rd, rn, rm);
1640   }
Nop()1641   void Nop() {
1642     VIXL_ASSERT(allow_macro_instructions_);
1643     SingleEmissionCheckScope guard(this);
1644     nop();
1645   }
Rbit(const Register & rd,const Register & rn)1646   void Rbit(const Register& rd, const Register& rn) {
1647     VIXL_ASSERT(allow_macro_instructions_);
1648     VIXL_ASSERT(!rd.IsZero());
1649     VIXL_ASSERT(!rn.IsZero());
1650     SingleEmissionCheckScope guard(this);
1651     rbit(rd, rn);
1652   }
1653   void Ret(const Register& xn = lr) {
1654     VIXL_ASSERT(allow_macro_instructions_);
1655     VIXL_ASSERT(!xn.IsZero());
1656     SingleEmissionCheckScope guard(this);
1657     ret(xn);
1658   }
Rev(const Register & rd,const Register & rn)1659   void Rev(const Register& rd, const Register& rn) {
1660     VIXL_ASSERT(allow_macro_instructions_);
1661     VIXL_ASSERT(!rd.IsZero());
1662     VIXL_ASSERT(!rn.IsZero());
1663     SingleEmissionCheckScope guard(this);
1664     rev(rd, rn);
1665   }
Rev16(const Register & rd,const Register & rn)1666   void Rev16(const Register& rd, const Register& rn) {
1667     VIXL_ASSERT(allow_macro_instructions_);
1668     VIXL_ASSERT(!rd.IsZero());
1669     VIXL_ASSERT(!rn.IsZero());
1670     SingleEmissionCheckScope guard(this);
1671     rev16(rd, rn);
1672   }
Rev32(const Register & rd,const Register & rn)1673   void Rev32(const Register& rd, const Register& rn) {
1674     VIXL_ASSERT(allow_macro_instructions_);
1675     VIXL_ASSERT(!rd.IsZero());
1676     VIXL_ASSERT(!rn.IsZero());
1677     SingleEmissionCheckScope guard(this);
1678     rev32(rd, rn);
1679   }
Ror(const Register & rd,const Register & rs,unsigned shift)1680   void Ror(const Register& rd, const Register& rs, unsigned shift) {
1681     VIXL_ASSERT(allow_macro_instructions_);
1682     VIXL_ASSERT(!rd.IsZero());
1683     VIXL_ASSERT(!rs.IsZero());
1684     SingleEmissionCheckScope guard(this);
1685     ror(rd, rs, shift);
1686   }
Ror(const Register & rd,const Register & rn,const Register & rm)1687   void Ror(const Register& rd, const Register& rn, const Register& rm) {
1688     VIXL_ASSERT(allow_macro_instructions_);
1689     VIXL_ASSERT(!rd.IsZero());
1690     VIXL_ASSERT(!rn.IsZero());
1691     VIXL_ASSERT(!rm.IsZero());
1692     SingleEmissionCheckScope guard(this);
1693     rorv(rd, rn, rm);
1694   }
Sbfiz(const Register & rd,const Register & rn,unsigned lsb,unsigned width)1695   void Sbfiz(const Register& rd,
1696              const Register& rn,
1697              unsigned lsb,
1698              unsigned width) {
1699     VIXL_ASSERT(allow_macro_instructions_);
1700     VIXL_ASSERT(!rd.IsZero());
1701     VIXL_ASSERT(!rn.IsZero());
1702     SingleEmissionCheckScope guard(this);
1703     sbfiz(rd, rn, lsb, width);
1704   }
Sbfm(const Register & rd,const Register & rn,unsigned immr,unsigned imms)1705   void Sbfm(const Register& rd,
1706             const Register& rn,
1707             unsigned immr,
1708             unsigned imms) {
1709     VIXL_ASSERT(allow_macro_instructions_);
1710     VIXL_ASSERT(!rd.IsZero());
1711     VIXL_ASSERT(!rn.IsZero());
1712     SingleEmissionCheckScope guard(this);
1713     sbfm(rd, rn, immr, imms);
1714   }
Sbfx(const Register & rd,const Register & rn,unsigned lsb,unsigned width)1715   void Sbfx(const Register& rd,
1716             const Register& rn,
1717             unsigned lsb,
1718             unsigned width) {
1719     VIXL_ASSERT(allow_macro_instructions_);
1720     VIXL_ASSERT(!rd.IsZero());
1721     VIXL_ASSERT(!rn.IsZero());
1722     SingleEmissionCheckScope guard(this);
1723     sbfx(rd, rn, lsb, width);
1724   }
1725   void Scvtf(const VRegister& vd, const Register& rn, int fbits = 0) {
1726     VIXL_ASSERT(allow_macro_instructions_);
1727     VIXL_ASSERT(!rn.IsZero());
1728     SingleEmissionCheckScope guard(this);
1729     scvtf(vd, rn, fbits);
1730   }
Sdiv(const Register & rd,const Register & rn,const Register & rm)1731   void Sdiv(const Register& rd, const Register& rn, const Register& rm) {
1732     VIXL_ASSERT(allow_macro_instructions_);
1733     VIXL_ASSERT(!rd.IsZero());
1734     VIXL_ASSERT(!rn.IsZero());
1735     VIXL_ASSERT(!rm.IsZero());
1736     SingleEmissionCheckScope guard(this);
1737     sdiv(rd, rn, rm);
1738   }
Smaddl(const Register & rd,const Register & rn,const Register & rm,const Register & ra)1739   void Smaddl(const Register& rd,
1740               const Register& rn,
1741               const Register& rm,
1742               const Register& ra) {
1743     VIXL_ASSERT(allow_macro_instructions_);
1744     VIXL_ASSERT(!rd.IsZero());
1745     VIXL_ASSERT(!rn.IsZero());
1746     VIXL_ASSERT(!rm.IsZero());
1747     VIXL_ASSERT(!ra.IsZero());
1748     SingleEmissionCheckScope guard(this);
1749     smaddl(rd, rn, rm, ra);
1750   }
Smsubl(const Register & rd,const Register & rn,const Register & rm,const Register & ra)1751   void Smsubl(const Register& rd,
1752               const Register& rn,
1753               const Register& rm,
1754               const Register& ra) {
1755     VIXL_ASSERT(allow_macro_instructions_);
1756     VIXL_ASSERT(!rd.IsZero());
1757     VIXL_ASSERT(!rn.IsZero());
1758     VIXL_ASSERT(!rm.IsZero());
1759     VIXL_ASSERT(!ra.IsZero());
1760     SingleEmissionCheckScope guard(this);
1761     smsubl(rd, rn, rm, ra);
1762   }
Smull(const Register & rd,const Register & rn,const Register & rm)1763   void Smull(const Register& rd, const Register& rn, const Register& rm) {
1764     VIXL_ASSERT(allow_macro_instructions_);
1765     VIXL_ASSERT(!rd.IsZero());
1766     VIXL_ASSERT(!rn.IsZero());
1767     VIXL_ASSERT(!rm.IsZero());
1768     SingleEmissionCheckScope guard(this);
1769     smull(rd, rn, rm);
1770   }
Smulh(const Register & xd,const Register & xn,const Register & xm)1771   void Smulh(const Register& xd, const Register& xn, const Register& xm) {
1772     VIXL_ASSERT(allow_macro_instructions_);
1773     VIXL_ASSERT(!xd.IsZero());
1774     VIXL_ASSERT(!xn.IsZero());
1775     VIXL_ASSERT(!xm.IsZero());
1776     SingleEmissionCheckScope guard(this);
1777     smulh(xd, xn, xm);
1778   }
Stlr(const Register & rt,const MemOperand & dst)1779   void Stlr(const Register& rt, const MemOperand& dst) {
1780     VIXL_ASSERT(allow_macro_instructions_);
1781     SingleEmissionCheckScope guard(this);
1782     stlr(rt, dst);
1783   }
Stlrb(const Register & rt,const MemOperand & dst)1784   void Stlrb(const Register& rt, const MemOperand& dst) {
1785     VIXL_ASSERT(allow_macro_instructions_);
1786     SingleEmissionCheckScope guard(this);
1787     stlrb(rt, dst);
1788   }
Stlrh(const Register & rt,const MemOperand & dst)1789   void Stlrh(const Register& rt, const MemOperand& dst) {
1790     VIXL_ASSERT(allow_macro_instructions_);
1791     SingleEmissionCheckScope guard(this);
1792     stlrh(rt, dst);
1793   }
Stlxp(const Register & rs,const Register & rt,const Register & rt2,const MemOperand & dst)1794   void Stlxp(const Register& rs,
1795              const Register& rt,
1796              const Register& rt2,
1797              const MemOperand& dst) {
1798     VIXL_ASSERT(allow_macro_instructions_);
1799     VIXL_ASSERT(!rs.Aliases(dst.base()));
1800     VIXL_ASSERT(!rs.Aliases(rt));
1801     VIXL_ASSERT(!rs.Aliases(rt2));
1802     SingleEmissionCheckScope guard(this);
1803     stlxp(rs, rt, rt2, dst);
1804   }
Stlxr(const Register & rs,const Register & rt,const MemOperand & dst)1805   void Stlxr(const Register& rs, const Register& rt, const MemOperand& dst) {
1806     VIXL_ASSERT(allow_macro_instructions_);
1807     VIXL_ASSERT(!rs.Aliases(dst.base()));
1808     VIXL_ASSERT(!rs.Aliases(rt));
1809     SingleEmissionCheckScope guard(this);
1810     stlxr(rs, rt, dst);
1811   }
Stlxrb(const Register & rs,const Register & rt,const MemOperand & dst)1812   void Stlxrb(const Register& rs, const Register& rt, const MemOperand& dst) {
1813     VIXL_ASSERT(allow_macro_instructions_);
1814     VIXL_ASSERT(!rs.Aliases(dst.base()));
1815     VIXL_ASSERT(!rs.Aliases(rt));
1816     SingleEmissionCheckScope guard(this);
1817     stlxrb(rs, rt, dst);
1818   }
Stlxrh(const Register & rs,const Register & rt,const MemOperand & dst)1819   void Stlxrh(const Register& rs, const Register& rt, const MemOperand& dst) {
1820     VIXL_ASSERT(allow_macro_instructions_);
1821     VIXL_ASSERT(!rs.Aliases(dst.base()));
1822     VIXL_ASSERT(!rs.Aliases(rt));
1823     SingleEmissionCheckScope guard(this);
1824     stlxrh(rs, rt, dst);
1825   }
Stnp(const CPURegister & rt,const CPURegister & rt2,const MemOperand & dst)1826   void Stnp(const CPURegister& rt,
1827             const CPURegister& rt2,
1828             const MemOperand& dst) {
1829     VIXL_ASSERT(allow_macro_instructions_);
1830     SingleEmissionCheckScope guard(this);
1831     stnp(rt, rt2, dst);
1832   }
Stxp(const Register & rs,const Register & rt,const Register & rt2,const MemOperand & dst)1833   void Stxp(const Register& rs,
1834             const Register& rt,
1835             const Register& rt2,
1836             const MemOperand& dst) {
1837     VIXL_ASSERT(allow_macro_instructions_);
1838     VIXL_ASSERT(!rs.Aliases(dst.base()));
1839     VIXL_ASSERT(!rs.Aliases(rt));
1840     VIXL_ASSERT(!rs.Aliases(rt2));
1841     SingleEmissionCheckScope guard(this);
1842     stxp(rs, rt, rt2, dst);
1843   }
Stxr(const Register & rs,const Register & rt,const MemOperand & dst)1844   void Stxr(const Register& rs, const Register& rt, const MemOperand& dst) {
1845     VIXL_ASSERT(allow_macro_instructions_);
1846     VIXL_ASSERT(!rs.Aliases(dst.base()));
1847     VIXL_ASSERT(!rs.Aliases(rt));
1848     SingleEmissionCheckScope guard(this);
1849     stxr(rs, rt, dst);
1850   }
Stxrb(const Register & rs,const Register & rt,const MemOperand & dst)1851   void Stxrb(const Register& rs, const Register& rt, const MemOperand& dst) {
1852     VIXL_ASSERT(allow_macro_instructions_);
1853     VIXL_ASSERT(!rs.Aliases(dst.base()));
1854     VIXL_ASSERT(!rs.Aliases(rt));
1855     SingleEmissionCheckScope guard(this);
1856     stxrb(rs, rt, dst);
1857   }
Stxrh(const Register & rs,const Register & rt,const MemOperand & dst)1858   void Stxrh(const Register& rs, const Register& rt, const MemOperand& dst) {
1859     VIXL_ASSERT(allow_macro_instructions_);
1860     VIXL_ASSERT(!rs.Aliases(dst.base()));
1861     VIXL_ASSERT(!rs.Aliases(rt));
1862     SingleEmissionCheckScope guard(this);
1863     stxrh(rs, rt, dst);
1864   }
Svc(int code)1865   void Svc(int code) {
1866     VIXL_ASSERT(allow_macro_instructions_);
1867     SingleEmissionCheckScope guard(this);
1868     svc(code);
1869   }
Sxtb(const Register & rd,const Register & rn)1870   void Sxtb(const Register& rd, const Register& rn) {
1871     VIXL_ASSERT(allow_macro_instructions_);
1872     VIXL_ASSERT(!rd.IsZero());
1873     VIXL_ASSERT(!rn.IsZero());
1874     SingleEmissionCheckScope guard(this);
1875     sxtb(rd, rn);
1876   }
Sxth(const Register & rd,const Register & rn)1877   void Sxth(const Register& rd, const Register& rn) {
1878     VIXL_ASSERT(allow_macro_instructions_);
1879     VIXL_ASSERT(!rd.IsZero());
1880     VIXL_ASSERT(!rn.IsZero());
1881     SingleEmissionCheckScope guard(this);
1882     sxth(rd, rn);
1883   }
Sxtw(const Register & rd,const Register & rn)1884   void Sxtw(const Register& rd, const Register& rn) {
1885     VIXL_ASSERT(allow_macro_instructions_);
1886     VIXL_ASSERT(!rd.IsZero());
1887     VIXL_ASSERT(!rn.IsZero());
1888     SingleEmissionCheckScope guard(this);
1889     sxtw(rd, rn);
1890   }
Tbl(const VRegister & vd,const VRegister & vn,const VRegister & vm)1891   void Tbl(const VRegister& vd,
1892            const VRegister& vn,
1893            const VRegister& vm) {
1894     VIXL_ASSERT(allow_macro_instructions_);
1895     SingleEmissionCheckScope guard(this);
1896     tbl(vd, vn, vm);
1897   }
Tbl(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vm)1898   void Tbl(const VRegister& vd,
1899            const VRegister& vn,
1900            const VRegister& vn2,
1901            const VRegister& vm) {
1902     VIXL_ASSERT(allow_macro_instructions_);
1903     SingleEmissionCheckScope guard(this);
1904     tbl(vd, vn, vn2, vm);
1905   }
Tbl(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vn3,const VRegister & vm)1906   void Tbl(const VRegister& vd,
1907            const VRegister& vn,
1908            const VRegister& vn2,
1909            const VRegister& vn3,
1910            const VRegister& vm) {
1911     VIXL_ASSERT(allow_macro_instructions_);
1912     SingleEmissionCheckScope guard(this);
1913     tbl(vd, vn, vn2, vn3, vm);
1914   }
Tbl(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vn3,const VRegister & vn4,const VRegister & vm)1915   void Tbl(const VRegister& vd,
1916            const VRegister& vn,
1917            const VRegister& vn2,
1918            const VRegister& vn3,
1919            const VRegister& vn4,
1920            const VRegister& vm) {
1921     VIXL_ASSERT(allow_macro_instructions_);
1922     SingleEmissionCheckScope guard(this);
1923     tbl(vd, vn, vn2, vn3, vn4, vm);
1924   }
Tbx(const VRegister & vd,const VRegister & vn,const VRegister & vm)1925   void Tbx(const VRegister& vd,
1926            const VRegister& vn,
1927            const VRegister& vm) {
1928     VIXL_ASSERT(allow_macro_instructions_);
1929     SingleEmissionCheckScope guard(this);
1930     tbx(vd, vn, vm);
1931   }
Tbx(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vm)1932   void Tbx(const VRegister& vd,
1933            const VRegister& vn,
1934            const VRegister& vn2,
1935            const VRegister& vm) {
1936     VIXL_ASSERT(allow_macro_instructions_);
1937     SingleEmissionCheckScope guard(this);
1938     tbx(vd, vn, vn2, vm);
1939   }
Tbx(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vn3,const VRegister & vm)1940   void Tbx(const VRegister& vd,
1941            const VRegister& vn,
1942            const VRegister& vn2,
1943            const VRegister& vn3,
1944            const VRegister& vm) {
1945     VIXL_ASSERT(allow_macro_instructions_);
1946     SingleEmissionCheckScope guard(this);
1947     tbx(vd, vn, vn2, vn3, vm);
1948   }
Tbx(const VRegister & vd,const VRegister & vn,const VRegister & vn2,const VRegister & vn3,const VRegister & vn4,const VRegister & vm)1949   void Tbx(const VRegister& vd,
1950            const VRegister& vn,
1951            const VRegister& vn2,
1952            const VRegister& vn3,
1953            const VRegister& vn4,
1954            const VRegister& vm) {
1955     VIXL_ASSERT(allow_macro_instructions_);
1956     SingleEmissionCheckScope guard(this);
1957     tbx(vd, vn, vn2, vn3, vn4, vm);
1958   }
1959   void Tbnz(const Register& rt, unsigned bit_pos, Label* label);
1960   void Tbz(const Register& rt, unsigned bit_pos, Label* label);
Ubfiz(const Register & rd,const Register & rn,unsigned lsb,unsigned width)1961   void Ubfiz(const Register& rd,
1962              const Register& rn,
1963              unsigned lsb,
1964              unsigned width) {
1965     VIXL_ASSERT(allow_macro_instructions_);
1966     VIXL_ASSERT(!rd.IsZero());
1967     VIXL_ASSERT(!rn.IsZero());
1968     SingleEmissionCheckScope guard(this);
1969     ubfiz(rd, rn, lsb, width);
1970   }
Ubfm(const Register & rd,const Register & rn,unsigned immr,unsigned imms)1971   void Ubfm(const Register& rd,
1972             const Register& rn,
1973             unsigned immr,
1974             unsigned imms) {
1975     VIXL_ASSERT(allow_macro_instructions_);
1976     VIXL_ASSERT(!rd.IsZero());
1977     VIXL_ASSERT(!rn.IsZero());
1978     SingleEmissionCheckScope guard(this);
1979     ubfm(rd, rn, immr, imms);
1980   }
Ubfx(const Register & rd,const Register & rn,unsigned lsb,unsigned width)1981   void Ubfx(const Register& rd,
1982             const Register& rn,
1983             unsigned lsb,
1984             unsigned width) {
1985     VIXL_ASSERT(allow_macro_instructions_);
1986     VIXL_ASSERT(!rd.IsZero());
1987     VIXL_ASSERT(!rn.IsZero());
1988     SingleEmissionCheckScope guard(this);
1989     ubfx(rd, rn, lsb, width);
1990   }
1991   void Ucvtf(const VRegister& vd, const Register& rn, int fbits = 0) {
1992     VIXL_ASSERT(allow_macro_instructions_);
1993     VIXL_ASSERT(!rn.IsZero());
1994     SingleEmissionCheckScope guard(this);
1995     ucvtf(vd, rn, fbits);
1996   }
Udiv(const Register & rd,const Register & rn,const Register & rm)1997   void Udiv(const Register& rd, const Register& rn, const Register& rm) {
1998     VIXL_ASSERT(allow_macro_instructions_);
1999     VIXL_ASSERT(!rd.IsZero());
2000     VIXL_ASSERT(!rn.IsZero());
2001     VIXL_ASSERT(!rm.IsZero());
2002     SingleEmissionCheckScope guard(this);
2003     udiv(rd, rn, rm);
2004   }
Umaddl(const Register & rd,const Register & rn,const Register & rm,const Register & ra)2005   void Umaddl(const Register& rd,
2006               const Register& rn,
2007               const Register& rm,
2008               const Register& ra) {
2009     VIXL_ASSERT(allow_macro_instructions_);
2010     VIXL_ASSERT(!rd.IsZero());
2011     VIXL_ASSERT(!rn.IsZero());
2012     VIXL_ASSERT(!rm.IsZero());
2013     VIXL_ASSERT(!ra.IsZero());
2014     SingleEmissionCheckScope guard(this);
2015     umaddl(rd, rn, rm, ra);
2016   }
Umull(const Register & rd,const Register & rn,const Register & rm)2017   void Umull(const Register& rd,
2018              const Register& rn,
2019              const Register& rm) {
2020     VIXL_ASSERT(allow_macro_instructions_);
2021     VIXL_ASSERT(!rd.IsZero());
2022     VIXL_ASSERT(!rn.IsZero());
2023     VIXL_ASSERT(!rm.IsZero());
2024     SingleEmissionCheckScope guard(this);
2025     umull(rd, rn, rm);
2026   }
Umulh(const Register & xd,const Register & xn,const Register & xm)2027   void Umulh(const Register& xd, const Register& xn, const Register& xm) {
2028     VIXL_ASSERT(allow_macro_instructions_);
2029     VIXL_ASSERT(!xd.IsZero());
2030     VIXL_ASSERT(!xn.IsZero());
2031     VIXL_ASSERT(!xm.IsZero());
2032     SingleEmissionCheckScope guard(this);
2033     umulh(xd, xn, xm);
2034   }
Umsubl(const Register & rd,const Register & rn,const Register & rm,const Register & ra)2035   void Umsubl(const Register& rd,
2036               const Register& rn,
2037               const Register& rm,
2038               const Register& ra) {
2039     VIXL_ASSERT(allow_macro_instructions_);
2040     VIXL_ASSERT(!rd.IsZero());
2041     VIXL_ASSERT(!rn.IsZero());
2042     VIXL_ASSERT(!rm.IsZero());
2043     VIXL_ASSERT(!ra.IsZero());
2044     SingleEmissionCheckScope guard(this);
2045     umsubl(rd, rn, rm, ra);
2046   }
Unreachable()2047   void Unreachable() {
2048     VIXL_ASSERT(allow_macro_instructions_);
2049     SingleEmissionCheckScope guard(this);
2050 #ifdef USE_SIMULATOR
2051     hlt(kUnreachableOpcode);
2052 #else
2053     // Branch to 0 to generate a segfault.
2054     // lr - kInstructionSize is the address of the offending instruction.
2055     blr(xzr);
2056 #endif
2057   }
Uxtb(const Register & rd,const Register & rn)2058   void Uxtb(const Register& rd, const Register& rn) {
2059     VIXL_ASSERT(allow_macro_instructions_);
2060     VIXL_ASSERT(!rd.IsZero());
2061     VIXL_ASSERT(!rn.IsZero());
2062     SingleEmissionCheckScope guard(this);
2063     uxtb(rd, rn);
2064   }
Uxth(const Register & rd,const Register & rn)2065   void Uxth(const Register& rd, const Register& rn) {
2066     VIXL_ASSERT(allow_macro_instructions_);
2067     VIXL_ASSERT(!rd.IsZero());
2068     VIXL_ASSERT(!rn.IsZero());
2069     SingleEmissionCheckScope guard(this);
2070     uxth(rd, rn);
2071   }
Uxtw(const Register & rd,const Register & rn)2072   void Uxtw(const Register& rd, const Register& rn) {
2073     VIXL_ASSERT(allow_macro_instructions_);
2074     VIXL_ASSERT(!rd.IsZero());
2075     VIXL_ASSERT(!rn.IsZero());
2076     SingleEmissionCheckScope guard(this);
2077     uxtw(rd, rn);
2078   }
2079 
2080   // NEON 3 vector register instructions.
2081   #define NEON_3VREG_MACRO_LIST(V) \
2082     V(add, Add)                    \
2083     V(addhn, Addhn)                \
2084     V(addhn2, Addhn2)              \
2085     V(addp, Addp)                  \
2086     V(and_, And)                   \
2087     V(bic, Bic)                    \
2088     V(bif, Bif)                    \
2089     V(bit, Bit)                    \
2090     V(bsl, Bsl)                    \
2091     V(cmeq, Cmeq)                  \
2092     V(cmge, Cmge)                  \
2093     V(cmgt, Cmgt)                  \
2094     V(cmhi, Cmhi)                  \
2095     V(cmhs, Cmhs)                  \
2096     V(cmtst, Cmtst)                \
2097     V(eor, Eor)                    \
2098     V(fabd, Fabd)                  \
2099     V(facge, Facge)                \
2100     V(facgt, Facgt)                \
2101     V(faddp, Faddp)                \
2102     V(fcmeq, Fcmeq)                \
2103     V(fcmge, Fcmge)                \
2104     V(fcmgt, Fcmgt)                \
2105     V(fmaxnmp, Fmaxnmp)            \
2106     V(fmaxp, Fmaxp)                \
2107     V(fminnmp, Fminnmp)            \
2108     V(fminp, Fminp)                \
2109     V(fmla, Fmla)                  \
2110     V(fmls, Fmls)                  \
2111     V(fmulx, Fmulx)                \
2112     V(frecps, Frecps)              \
2113     V(frsqrts, Frsqrts)            \
2114     V(mla, Mla)                    \
2115     V(mls, Mls)                    \
2116     V(mul, Mul)                    \
2117     V(orn, Orn)                    \
2118     V(orr, Orr)                    \
2119     V(pmul, Pmul)                  \
2120     V(pmull, Pmull)                \
2121     V(pmull2, Pmull2)              \
2122     V(raddhn, Raddhn)              \
2123     V(raddhn2, Raddhn2)            \
2124     V(rsubhn, Rsubhn)              \
2125     V(rsubhn2, Rsubhn2)            \
2126     V(saba, Saba)                  \
2127     V(sabal, Sabal)                \
2128     V(sabal2, Sabal2)              \
2129     V(sabd, Sabd)                  \
2130     V(sabdl, Sabdl)                \
2131     V(sabdl2, Sabdl2)              \
2132     V(saddl, Saddl)                \
2133     V(saddl2, Saddl2)              \
2134     V(saddw, Saddw)                \
2135     V(saddw2, Saddw2)              \
2136     V(shadd, Shadd)                \
2137     V(shsub, Shsub)                \
2138     V(smax, Smax)                  \
2139     V(smaxp, Smaxp)                \
2140     V(smin, Smin)                  \
2141     V(sminp, Sminp)                \
2142     V(smlal, Smlal)                \
2143     V(smlal2, Smlal2)              \
2144     V(smlsl, Smlsl)                \
2145     V(smlsl2, Smlsl2)              \
2146     V(smull, Smull)                \
2147     V(smull2, Smull2)              \
2148     V(sqadd, Sqadd)                \
2149     V(sqdmlal, Sqdmlal)            \
2150     V(sqdmlal2, Sqdmlal2)          \
2151     V(sqdmlsl, Sqdmlsl)            \
2152     V(sqdmlsl2, Sqdmlsl2)          \
2153     V(sqdmulh, Sqdmulh)            \
2154     V(sqdmull, Sqdmull)            \
2155     V(sqdmull2, Sqdmull2)          \
2156     V(sqrdmulh, Sqrdmulh)          \
2157     V(sqrshl, Sqrshl)              \
2158     V(sqshl, Sqshl)                \
2159     V(sqsub, Sqsub)                \
2160     V(srhadd, Srhadd)              \
2161     V(srshl, Srshl)                \
2162     V(sshl, Sshl)                  \
2163     V(ssubl, Ssubl)                \
2164     V(ssubl2, Ssubl2)              \
2165     V(ssubw, Ssubw)                \
2166     V(ssubw2, Ssubw2)              \
2167     V(sub, Sub)                    \
2168     V(subhn, Subhn)                \
2169     V(subhn2, Subhn2)              \
2170     V(trn1, Trn1)                  \
2171     V(trn2, Trn2)                  \
2172     V(uaba, Uaba)                  \
2173     V(uabal, Uabal)                \
2174     V(uabal2, Uabal2)              \
2175     V(uabd, Uabd)                  \
2176     V(uabdl, Uabdl)                \
2177     V(uabdl2, Uabdl2)              \
2178     V(uaddl, Uaddl)                \
2179     V(uaddl2, Uaddl2)              \
2180     V(uaddw, Uaddw)                \
2181     V(uaddw2, Uaddw2)              \
2182     V(uhadd, Uhadd)                \
2183     V(uhsub, Uhsub)                \
2184     V(umax, Umax)                  \
2185     V(umaxp, Umaxp)                \
2186     V(umin, Umin)                  \
2187     V(uminp, Uminp)                \
2188     V(umlal, Umlal)                \
2189     V(umlal2, Umlal2)              \
2190     V(umlsl, Umlsl)                \
2191     V(umlsl2, Umlsl2)              \
2192     V(umull, Umull)                \
2193     V(umull2, Umull2)              \
2194     V(uqadd, Uqadd)                \
2195     V(uqrshl, Uqrshl)              \
2196     V(uqshl, Uqshl)                \
2197     V(uqsub, Uqsub)                \
2198     V(urhadd, Urhadd)              \
2199     V(urshl, Urshl)                \
2200     V(ushl, Ushl)                  \
2201     V(usubl, Usubl)                \
2202     V(usubl2, Usubl2)              \
2203     V(usubw, Usubw)                \
2204     V(usubw2, Usubw2)              \
2205     V(uzp1, Uzp1)                  \
2206     V(uzp2, Uzp2)                  \
2207     V(zip1, Zip1)                  \
2208     V(zip2, Zip2)
2209 
2210   #define DEFINE_MACRO_ASM_FUNC(ASM, MASM)   \
2211   void MASM(const VRegister& vd,             \
2212             const VRegister& vn,             \
2213             const VRegister& vm) {           \
2214     VIXL_ASSERT(allow_macro_instructions_);  \
2215     SingleEmissionCheckScope guard(this);    \
2216     ASM(vd, vn, vm);                         \
2217   }
2218   NEON_3VREG_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
2219   #undef DEFINE_MACRO_ASM_FUNC
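  // For reference, each V(asm, Masm) entry in the list above expands to a
  // wrapper of the following form (shown for V(add, Add); the expansion is
  // purely mechanical):
  //   void Add(const VRegister& vd, const VRegister& vn, const VRegister& vm) {
  //     VIXL_ASSERT(allow_macro_instructions_);
  //     SingleEmissionCheckScope guard(this);
  //     add(vd, vn, vm);
  //   }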
2220 
2221   // NEON 2 vector register instructions.
2222   #define NEON_2VREG_MACRO_LIST(V) \
2223     V(abs,     Abs)                \
2224     V(addp,    Addp)               \
2225     V(addv,    Addv)               \
2226     V(cls,     Cls)                \
2227     V(clz,     Clz)                \
2228     V(cnt,     Cnt)                \
2229     V(fabs,    Fabs)               \
2230     V(faddp,   Faddp)              \
2231     V(fcvtas,  Fcvtas)             \
2232     V(fcvtau,  Fcvtau)             \
2233     V(fcvtms,  Fcvtms)             \
2234     V(fcvtmu,  Fcvtmu)             \
2235     V(fcvtns,  Fcvtns)             \
2236     V(fcvtnu,  Fcvtnu)             \
2237     V(fcvtps,  Fcvtps)             \
2238     V(fcvtpu,  Fcvtpu)             \
2239     V(fmaxnmp, Fmaxnmp)            \
2240     V(fmaxnmv, Fmaxnmv)            \
2241     V(fmaxp,   Fmaxp)              \
2242     V(fmaxv,   Fmaxv)              \
2243     V(fminnmp, Fminnmp)            \
2244     V(fminnmv, Fminnmv)            \
2245     V(fminp,   Fminp)              \
2246     V(fminv,   Fminv)              \
2247     V(fneg,    Fneg)               \
2248     V(frecpe,  Frecpe)             \
2249     V(frecpx,  Frecpx)             \
2250     V(frinta,  Frinta)             \
2251     V(frinti,  Frinti)             \
2252     V(frintm,  Frintm)             \
2253     V(frintn,  Frintn)             \
2254     V(frintp,  Frintp)             \
2255     V(frintx,  Frintx)             \
2256     V(frintz,  Frintz)             \
2257     V(frsqrte, Frsqrte)            \
2258     V(fsqrt,   Fsqrt)              \
2259     V(mov,     Mov)                \
2260     V(mvn,     Mvn)                \
2261     V(neg,     Neg)                \
2262     V(not_,    Not)                \
2263     V(rbit,    Rbit)               \
2264     V(rev16,   Rev16)              \
2265     V(rev32,   Rev32)              \
2266     V(rev64,   Rev64)              \
2267     V(sadalp,  Sadalp)             \
2268     V(saddlp,  Saddlp)             \
2269     V(saddlv,  Saddlv)             \
2270     V(smaxv,   Smaxv)              \
2271     V(sminv,   Sminv)              \
2272     V(sqabs,   Sqabs)              \
2273     V(sqneg,   Sqneg)              \
2274     V(sqxtn,   Sqxtn)              \
2275     V(sqxtn2,  Sqxtn2)             \
2276     V(sqxtun,  Sqxtun)             \
2277     V(sqxtun2, Sqxtun2)            \
2278     V(suqadd,  Suqadd)             \
2279     V(sxtl,    Sxtl)               \
2280     V(sxtl2,   Sxtl2)              \
2281     V(uadalp,  Uadalp)             \
2282     V(uaddlp,  Uaddlp)             \
2283     V(uaddlv,  Uaddlv)             \
2284     V(umaxv,   Umaxv)              \
2285     V(uminv,   Uminv)              \
2286     V(uqxtn,   Uqxtn)              \
2287     V(uqxtn2,  Uqxtn2)             \
2288     V(urecpe,  Urecpe)             \
2289     V(ursqrte, Ursqrte)            \
2290     V(usqadd,  Usqadd)             \
2291     V(uxtl,    Uxtl)               \
2292     V(uxtl2,   Uxtl2)              \
2293     V(xtn,     Xtn)                \
2294     V(xtn2,    Xtn2)
2295 
2296   #define DEFINE_MACRO_ASM_FUNC(ASM, MASM)   \
2297   void MASM(const VRegister& vd,             \
2298             const VRegister& vn) {           \
2299     VIXL_ASSERT(allow_macro_instructions_);  \
2300     SingleEmissionCheckScope guard(this);    \
2301     ASM(vd, vn);                             \
2302   }
NEON_2VREG_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)2303   NEON_2VREG_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
2304   #undef DEFINE_MACRO_ASM_FUNC
2305 
2306   // NEON 2 vector register with immediate instructions.
2307   #define NEON_2VREG_FPIMM_MACRO_LIST(V) \
2308     V(fcmeq, Fcmeq)                      \
2309     V(fcmge, Fcmge)                      \
2310     V(fcmgt, Fcmgt)                      \
2311     V(fcmle, Fcmle)                      \
2312     V(fcmlt, Fcmlt)
2313 
2314   #define DEFINE_MACRO_ASM_FUNC(ASM, MASM)   \
2315   void MASM(const VRegister& vd,             \
2316             const VRegister& vn,             \
2317             double imm) {                    \
2318     VIXL_ASSERT(allow_macro_instructions_);  \
2319     SingleEmissionCheckScope guard(this);    \
2320     ASM(vd, vn, imm);                        \
2321   }
2322   NEON_2VREG_FPIMM_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
2323   #undef DEFINE_MACRO_ASM_FUNC
2324 
2325   // NEON by element instructions.
2326   #define NEON_BYELEMENT_MACRO_LIST(V) \
2327     V(fmul, Fmul)                      \
2328     V(fmla, Fmla)                      \
2329     V(fmls, Fmls)                      \
2330     V(fmulx, Fmulx)                    \
2331     V(mul, Mul)                        \
2332     V(mla, Mla)                        \
2333     V(mls, Mls)                        \
2334     V(sqdmulh, Sqdmulh)                \
2335     V(sqrdmulh, Sqrdmulh)              \
2336     V(sqdmull,  Sqdmull)               \
2337     V(sqdmull2, Sqdmull2)              \
2338     V(sqdmlal,  Sqdmlal)               \
2339     V(sqdmlal2, Sqdmlal2)              \
2340     V(sqdmlsl,  Sqdmlsl)               \
2341     V(sqdmlsl2, Sqdmlsl2)              \
2342     V(smull,  Smull)                   \
2343     V(smull2, Smull2)                  \
2344     V(smlal,  Smlal)                   \
2345     V(smlal2, Smlal2)                  \
2346     V(smlsl,  Smlsl)                   \
2347     V(smlsl2, Smlsl2)                  \
2348     V(umull,  Umull)                   \
2349     V(umull2, Umull2)                  \
2350     V(umlal,  Umlal)                   \
2351     V(umlal2, Umlal2)                  \
2352     V(umlsl,  Umlsl)                   \
2353     V(umlsl2, Umlsl2)
2354 
2355   #define DEFINE_MACRO_ASM_FUNC(ASM, MASM)   \
2356   void MASM(const VRegister& vd,             \
2357             const VRegister& vn,             \
            const VRegister& vm,             \
            int vm_index) {                  \
2361     VIXL_ASSERT(allow_macro_instructions_);  \
2362     SingleEmissionCheckScope guard(this);    \
2363     ASM(vd, vn, vm, vm_index);               \
2364   }
2365   NEON_BYELEMENT_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
2366   #undef DEFINE_MACRO_ASM_FUNC
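  // The by-element wrappers generated above take the lane of vm as the final
  // argument, e.g. Mul(vd, vn, vm, 3) multiplies each element of vn by lane 3
  // of vm (a sketch; register formats are elided).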
2367 
2368   #define NEON_2VREG_SHIFT_MACRO_LIST(V) \
2369     V(rshrn,     Rshrn)                  \
2370     V(rshrn2,    Rshrn2)                 \
2371     V(shl,       Shl)                    \
2372     V(shll,      Shll)                   \
2373     V(shll2,     Shll2)                  \
2374     V(shrn,      Shrn)                   \
2375     V(shrn2,     Shrn2)                  \
2376     V(sli,       Sli)                    \
2377     V(sqrshrn,   Sqrshrn)                \
2378     V(sqrshrn2,  Sqrshrn2)               \
2379     V(sqrshrun,  Sqrshrun)               \
2380     V(sqrshrun2, Sqrshrun2)              \
2381     V(sqshl,     Sqshl)                  \
2382     V(sqshlu,    Sqshlu)                 \
2383     V(sqshrn,    Sqshrn)                 \
2384     V(sqshrn2,   Sqshrn2)                \
2385     V(sqshrun,   Sqshrun)                \
2386     V(sqshrun2,  Sqshrun2)               \
2387     V(sri,       Sri)                    \
2388     V(srshr,     Srshr)                  \
2389     V(srsra,     Srsra)                  \
2390     V(sshll,     Sshll)                  \
2391     V(sshll2,    Sshll2)                 \
2392     V(sshr,      Sshr)                   \
2393     V(ssra,      Ssra)                   \
2394     V(uqrshrn,   Uqrshrn)                \
2395     V(uqrshrn2,  Uqrshrn2)               \
2396     V(uqshl,     Uqshl)                  \
2397     V(uqshrn,    Uqshrn)                 \
2398     V(uqshrn2,   Uqshrn2)                \
2399     V(urshr,     Urshr)                  \
2400     V(ursra,     Ursra)                  \
2401     V(ushll,     Ushll)                  \
2402     V(ushll2,    Ushll2)                 \
2403     V(ushr,      Ushr)                   \
    V(usra,      Usra)
2405 
2406   #define DEFINE_MACRO_ASM_FUNC(ASM, MASM)   \
2407   void MASM(const VRegister& vd,             \
2408             const VRegister& vn,             \
2409             int shift) {                     \
2410     VIXL_ASSERT(allow_macro_instructions_);  \
2411     SingleEmissionCheckScope guard(this);    \
2412     ASM(vd, vn, shift);                      \
2413   }
2414   NEON_2VREG_SHIFT_MACRO_LIST(DEFINE_MACRO_ASM_FUNC)
2415   #undef DEFINE_MACRO_ASM_FUNC
2416 
2417   void Bic(const VRegister& vd,
2418            const int imm8,
2419            const int left_shift = 0) {
2420     VIXL_ASSERT(allow_macro_instructions_);
2421     SingleEmissionCheckScope guard(this);
2422     bic(vd, imm8, left_shift);
2423   }
Cmeq(const VRegister & vd,const VRegister & vn,int imm)2424   void Cmeq(const VRegister& vd,
2425             const VRegister& vn,
2426             int imm) {
2427     VIXL_ASSERT(allow_macro_instructions_);
2428     SingleEmissionCheckScope guard(this);
2429     cmeq(vd, vn, imm);
2430   }
Cmge(const VRegister & vd,const VRegister & vn,int imm)2431   void Cmge(const VRegister& vd,
2432             const VRegister& vn,
2433             int imm) {
2434     VIXL_ASSERT(allow_macro_instructions_);
2435     SingleEmissionCheckScope guard(this);
2436     cmge(vd, vn, imm);
2437   }
Cmgt(const VRegister & vd,const VRegister & vn,int imm)2438   void Cmgt(const VRegister& vd,
2439             const VRegister& vn,
2440             int imm) {
2441     VIXL_ASSERT(allow_macro_instructions_);
2442     SingleEmissionCheckScope guard(this);
2443     cmgt(vd, vn, imm);
2444   }
Cmle(const VRegister & vd,const VRegister & vn,int imm)2445   void Cmle(const VRegister& vd,
2446             const VRegister& vn,
2447             int imm) {
2448     VIXL_ASSERT(allow_macro_instructions_);
2449     SingleEmissionCheckScope guard(this);
2450     cmle(vd, vn, imm);
2451   }
Cmlt(const VRegister & vd,const VRegister & vn,int imm)2452   void Cmlt(const VRegister& vd,
2453             const VRegister& vn,
2454             int imm) {
2455     VIXL_ASSERT(allow_macro_instructions_);
2456     SingleEmissionCheckScope guard(this);
2457     cmlt(vd, vn, imm);
2458   }
Dup(const VRegister & vd,const VRegister & vn,int index)2459   void Dup(const VRegister& vd,
2460            const VRegister& vn,
2461            int index) {
2462     VIXL_ASSERT(allow_macro_instructions_);
2463     SingleEmissionCheckScope guard(this);
2464     dup(vd, vn, index);
2465   }
Dup(const VRegister & vd,const Register & rn)2466   void Dup(const VRegister& vd,
2467            const Register& rn) {
2468     VIXL_ASSERT(allow_macro_instructions_);
2469     SingleEmissionCheckScope guard(this);
2470     dup(vd, rn);
2471   }
Ext(const VRegister & vd,const VRegister & vn,const VRegister & vm,int index)2472   void Ext(const VRegister& vd,
2473            const VRegister& vn,
2474            const VRegister& vm,
2475            int index) {
2476     VIXL_ASSERT(allow_macro_instructions_);
2477     SingleEmissionCheckScope guard(this);
2478     ext(vd, vn, vm, index);
2479   }
Ins(const VRegister & vd,int vd_index,const VRegister & vn,int vn_index)2480   void Ins(const VRegister& vd,
2481            int vd_index,
2482            const VRegister& vn,
2483            int vn_index) {
2484     VIXL_ASSERT(allow_macro_instructions_);
2485     SingleEmissionCheckScope guard(this);
2486     ins(vd, vd_index, vn, vn_index);
2487   }
Ins(const VRegister & vd,int vd_index,const Register & rn)2488   void Ins(const VRegister& vd,
2489            int vd_index,
2490            const Register& rn) {
2491     VIXL_ASSERT(allow_macro_instructions_);
2492     SingleEmissionCheckScope guard(this);
2493     ins(vd, vd_index, rn);
2494   }
Ld1(const VRegister & vt,const MemOperand & src)2495   void Ld1(const VRegister& vt,
2496            const MemOperand& src) {
2497     VIXL_ASSERT(allow_macro_instructions_);
2498     SingleEmissionCheckScope guard(this);
2499     ld1(vt, src);
2500   }
Ld1(const VRegister & vt,const VRegister & vt2,const MemOperand & src)2501   void Ld1(const VRegister& vt,
2502            const VRegister& vt2,
2503            const MemOperand& src) {
2504     VIXL_ASSERT(allow_macro_instructions_);
2505     SingleEmissionCheckScope guard(this);
2506     ld1(vt, vt2, src);
2507   }
Ld1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & src)2508   void Ld1(const VRegister& vt,
2509            const VRegister& vt2,
2510            const VRegister& vt3,
2511            const MemOperand& src) {
2512     VIXL_ASSERT(allow_macro_instructions_);
2513     SingleEmissionCheckScope guard(this);
2514     ld1(vt, vt2, vt3, src);
2515   }
Ld1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & src)2516   void Ld1(const VRegister& vt,
2517            const VRegister& vt2,
2518            const VRegister& vt3,
2519            const VRegister& vt4,
2520            const MemOperand& src) {
2521     VIXL_ASSERT(allow_macro_instructions_);
2522     SingleEmissionCheckScope guard(this);
2523     ld1(vt, vt2, vt3, vt4, src);
2524   }
Ld1(const VRegister & vt,int lane,const MemOperand & src)2525   void Ld1(const VRegister& vt,
2526            int lane,
2527            const MemOperand& src) {
2528     VIXL_ASSERT(allow_macro_instructions_);
2529     SingleEmissionCheckScope guard(this);
2530     ld1(vt, lane, src);
2531   }
Ld1r(const VRegister & vt,const MemOperand & src)2532   void Ld1r(const VRegister& vt,
2533             const MemOperand& src) {
2534     VIXL_ASSERT(allow_macro_instructions_);
2535     SingleEmissionCheckScope guard(this);
2536     ld1r(vt, src);
2537   }
Ld2(const VRegister & vt,const VRegister & vt2,const MemOperand & src)2538   void Ld2(const VRegister& vt,
2539            const VRegister& vt2,
2540            const MemOperand& src) {
2541     VIXL_ASSERT(allow_macro_instructions_);
2542     SingleEmissionCheckScope guard(this);
2543     ld2(vt, vt2, src);
2544   }
Ld2(const VRegister & vt,const VRegister & vt2,int lane,const MemOperand & src)2545   void Ld2(const VRegister& vt,
2546            const VRegister& vt2,
2547            int lane,
2548            const MemOperand& src) {
2549     VIXL_ASSERT(allow_macro_instructions_);
2550     SingleEmissionCheckScope guard(this);
2551     ld2(vt, vt2, lane, src);
2552   }
Ld2r(const VRegister & vt,const VRegister & vt2,const MemOperand & src)2553   void Ld2r(const VRegister& vt,
2554             const VRegister& vt2,
2555             const MemOperand& src) {
2556     VIXL_ASSERT(allow_macro_instructions_);
2557     SingleEmissionCheckScope guard(this);
2558     ld2r(vt, vt2, src);
2559   }
Ld3(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & src)2560   void Ld3(const VRegister& vt,
2561            const VRegister& vt2,
2562            const VRegister& vt3,
2563            const MemOperand& src) {
2564     VIXL_ASSERT(allow_macro_instructions_);
2565     SingleEmissionCheckScope guard(this);
2566     ld3(vt, vt2, vt3, src);
2567   }
Ld3(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,int lane,const MemOperand & src)2568   void Ld3(const VRegister& vt,
2569            const VRegister& vt2,
2570            const VRegister& vt3,
2571            int lane,
2572            const MemOperand& src) {
2573     VIXL_ASSERT(allow_macro_instructions_);
2574     SingleEmissionCheckScope guard(this);
2575     ld3(vt, vt2, vt3, lane, src);
2576   }
Ld3r(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & src)2577   void Ld3r(const VRegister& vt,
2578             const VRegister& vt2,
2579             const VRegister& vt3,
            const MemOperand& src) {
2581     VIXL_ASSERT(allow_macro_instructions_);
2582     SingleEmissionCheckScope guard(this);
2583     ld3r(vt, vt2, vt3, src);
2584   }
Ld4(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & src)2585   void Ld4(const VRegister& vt,
2586            const VRegister& vt2,
2587            const VRegister& vt3,
2588            const VRegister& vt4,
2589            const MemOperand& src) {
2590     VIXL_ASSERT(allow_macro_instructions_);
2591     SingleEmissionCheckScope guard(this);
2592     ld4(vt, vt2, vt3, vt4, src);
2593   }
Ld4(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,int lane,const MemOperand & src)2594   void Ld4(const VRegister& vt,
2595            const VRegister& vt2,
2596            const VRegister& vt3,
2597            const VRegister& vt4,
2598            int lane,
2599            const MemOperand& src) {
2600     VIXL_ASSERT(allow_macro_instructions_);
2601     SingleEmissionCheckScope guard(this);
2602     ld4(vt, vt2, vt3, vt4, lane, src);
2603   }
Ld4r(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & src)2604   void Ld4r(const VRegister& vt,
2605             const VRegister& vt2,
2606             const VRegister& vt3,
2607             const VRegister& vt4,
            const MemOperand& src) {
2609     VIXL_ASSERT(allow_macro_instructions_);
2610     SingleEmissionCheckScope guard(this);
2611     ld4r(vt, vt2, vt3, vt4, src);
2612   }
Mov(const VRegister & vd,int vd_index,const VRegister & vn,int vn_index)2613   void Mov(const VRegister& vd,
2614            int vd_index,
2615            const VRegister& vn,
2616            int vn_index) {
2617     VIXL_ASSERT(allow_macro_instructions_);
2618     SingleEmissionCheckScope guard(this);
2619     mov(vd, vd_index, vn, vn_index);
2620   }
Mov(const VRegister & vd,const VRegister & vn,int index)2621   void Mov(const VRegister& vd,
2622            const VRegister& vn,
2623            int index) {
2624     VIXL_ASSERT(allow_macro_instructions_);
2625     SingleEmissionCheckScope guard(this);
2626     mov(vd, vn, index);
2627   }
Mov(const VRegister & vd,int vd_index,const Register & rn)2628   void Mov(const VRegister& vd,
2629            int vd_index,
2630            const Register& rn) {
2631     VIXL_ASSERT(allow_macro_instructions_);
2632     SingleEmissionCheckScope guard(this);
2633     mov(vd, vd_index, rn);
2634   }
Mov(const Register & rd,const VRegister & vn,int vn_index)2635   void Mov(const Register& rd,
2636            const VRegister& vn,
2637            int vn_index) {
2638     VIXL_ASSERT(allow_macro_instructions_);
2639     SingleEmissionCheckScope guard(this);
2640     mov(rd, vn, vn_index);
2641   }
2642   void Movi(const VRegister& vd,
2643             uint64_t imm,
2644             Shift shift = LSL,
2645             int shift_amount = 0);
2646   void Movi(const VRegister& vd, uint64_t hi, uint64_t lo);
2647   void Mvni(const VRegister& vd,
2648             const int imm8,
2649             Shift shift = LSL,
2650             const int shift_amount = 0) {
2651     VIXL_ASSERT(allow_macro_instructions_);
2652     SingleEmissionCheckScope guard(this);
2653     mvni(vd, imm8, shift, shift_amount);
2654   }
2655   void Orr(const VRegister& vd,
2656            const int imm8,
2657            const int left_shift = 0) {
2658     VIXL_ASSERT(allow_macro_instructions_);
2659     SingleEmissionCheckScope guard(this);
2660     orr(vd, imm8, left_shift);
2661   }
2662   void Scvtf(const VRegister& vd,
2663              const VRegister& vn,
2664              int fbits = 0) {
2665     VIXL_ASSERT(allow_macro_instructions_);
2666     SingleEmissionCheckScope guard(this);
2667     scvtf(vd, vn, fbits);
2668   }
2669   void Ucvtf(const VRegister& vd,
2670              const VRegister& vn,
2671              int fbits = 0) {
2672     VIXL_ASSERT(allow_macro_instructions_);
2673     SingleEmissionCheckScope guard(this);
2674     ucvtf(vd, vn, fbits);
2675   }
2676   void Fcvtzs(const VRegister& vd,
2677               const VRegister& vn,
2678               int fbits = 0) {
2679     VIXL_ASSERT(allow_macro_instructions_);
2680     SingleEmissionCheckScope guard(this);
2681     fcvtzs(vd, vn, fbits);
2682   }
2683   void Fcvtzu(const VRegister& vd,
2684               const VRegister& vn,
2685               int fbits = 0) {
2686     VIXL_ASSERT(allow_macro_instructions_);
2687     SingleEmissionCheckScope guard(this);
2688     fcvtzu(vd, vn, fbits);
2689   }
St1(const VRegister & vt,const MemOperand & dst)2690   void St1(const VRegister& vt,
2691            const MemOperand& dst) {
2692     VIXL_ASSERT(allow_macro_instructions_);
2693     SingleEmissionCheckScope guard(this);
2694     st1(vt, dst);
2695   }
St1(const VRegister & vt,const VRegister & vt2,const MemOperand & dst)2696   void St1(const VRegister& vt,
2697            const VRegister& vt2,
2698            const MemOperand& dst) {
2699     VIXL_ASSERT(allow_macro_instructions_);
2700     SingleEmissionCheckScope guard(this);
2701     st1(vt, vt2, dst);
2702   }
St1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & dst)2703   void St1(const VRegister& vt,
2704            const VRegister& vt2,
2705            const VRegister& vt3,
2706            const MemOperand& dst) {
2707     VIXL_ASSERT(allow_macro_instructions_);
2708     SingleEmissionCheckScope guard(this);
2709     st1(vt, vt2, vt3, dst);
2710   }
St1(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & dst)2711   void St1(const VRegister& vt,
2712            const VRegister& vt2,
2713            const VRegister& vt3,
2714            const VRegister& vt4,
2715            const MemOperand& dst) {
2716     VIXL_ASSERT(allow_macro_instructions_);
2717     SingleEmissionCheckScope guard(this);
2718     st1(vt, vt2, vt3, vt4, dst);
2719   }
St1(const VRegister & vt,int lane,const MemOperand & dst)2720   void St1(const VRegister& vt,
2721            int lane,
2722            const MemOperand& dst) {
2723     VIXL_ASSERT(allow_macro_instructions_);
2724     SingleEmissionCheckScope guard(this);
2725     st1(vt, lane, dst);
2726   }
St2(const VRegister & vt,const VRegister & vt2,const MemOperand & dst)2727   void St2(const VRegister& vt,
2728            const VRegister& vt2,
2729            const MemOperand& dst) {
2730     VIXL_ASSERT(allow_macro_instructions_);
2731     SingleEmissionCheckScope guard(this);
2732     st2(vt, vt2, dst);
2733   }
St3(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const MemOperand & dst)2734   void St3(const VRegister& vt,
2735            const VRegister& vt2,
2736            const VRegister& vt3,
2737            const MemOperand& dst) {
2738     VIXL_ASSERT(allow_macro_instructions_);
2739     SingleEmissionCheckScope guard(this);
2740     st3(vt, vt2, vt3, dst);
2741   }
St4(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,const MemOperand & dst)2742   void St4(const VRegister& vt,
2743            const VRegister& vt2,
2744            const VRegister& vt3,
2745            const VRegister& vt4,
2746            const MemOperand& dst) {
2747     VIXL_ASSERT(allow_macro_instructions_);
2748     SingleEmissionCheckScope guard(this);
2749     st4(vt, vt2, vt3, vt4, dst);
2750   }
St2(const VRegister & vt,const VRegister & vt2,int lane,const MemOperand & dst)2751   void St2(const VRegister& vt,
2752            const VRegister& vt2,
2753            int lane,
2754            const MemOperand& dst) {
2755     VIXL_ASSERT(allow_macro_instructions_);
2756     SingleEmissionCheckScope guard(this);
2757     st2(vt, vt2, lane, dst);
2758   }
St3(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,int lane,const MemOperand & dst)2759   void St3(const VRegister& vt,
2760            const VRegister& vt2,
2761            const VRegister& vt3,
2762            int lane,
2763            const MemOperand& dst) {
2764     VIXL_ASSERT(allow_macro_instructions_);
2765     SingleEmissionCheckScope guard(this);
2766     st3(vt, vt2, vt3, lane, dst);
2767   }
St4(const VRegister & vt,const VRegister & vt2,const VRegister & vt3,const VRegister & vt4,int lane,const MemOperand & dst)2768   void St4(const VRegister& vt,
2769            const VRegister& vt2,
2770            const VRegister& vt3,
2771            const VRegister& vt4,
2772            int lane,
2773            const MemOperand& dst) {
2774     VIXL_ASSERT(allow_macro_instructions_);
2775     SingleEmissionCheckScope guard(this);
2776     st4(vt, vt2, vt3, vt4, lane, dst);
2777   }
Smov(const Register & rd,const VRegister & vn,int vn_index)2778   void Smov(const Register& rd,
2779             const VRegister& vn,
2780             int vn_index) {
2781     VIXL_ASSERT(allow_macro_instructions_);
2782     SingleEmissionCheckScope guard(this);
2783     smov(rd, vn, vn_index);
2784   }
Umov(const Register & rd,const VRegister & vn,int vn_index)2785   void Umov(const Register& rd,
2786             const VRegister& vn,
2787             int vn_index) {
2788     VIXL_ASSERT(allow_macro_instructions_);
2789     SingleEmissionCheckScope guard(this);
2790     umov(rd, vn, vn_index);
2791   }
Crc32b(const Register & rd,const Register & rn,const Register & rm)2792   void Crc32b(const Register& rd,
2793               const Register& rn,
2794               const Register& rm) {
2795     VIXL_ASSERT(allow_macro_instructions_);
2796     SingleEmissionCheckScope guard(this);
2797     crc32b(rd, rn, rm);
2798   }
Crc32h(const Register & rd,const Register & rn,const Register & rm)2799   void Crc32h(const Register& rd,
2800               const Register& rn,
2801               const Register& rm) {
2802     VIXL_ASSERT(allow_macro_instructions_);
2803     SingleEmissionCheckScope guard(this);
2804     crc32h(rd, rn, rm);
2805   }
Crc32w(const Register & rd,const Register & rn,const Register & rm)2806   void Crc32w(const Register& rd,
2807               const Register& rn,
2808               const Register& rm) {
2809     VIXL_ASSERT(allow_macro_instructions_);
2810     SingleEmissionCheckScope guard(this);
2811     crc32w(rd, rn, rm);
2812   }
Crc32x(const Register & rd,const Register & rn,const Register & rm)2813   void Crc32x(const Register& rd,
2814               const Register& rn,
2815               const Register& rm) {
2816     VIXL_ASSERT(allow_macro_instructions_);
2817     SingleEmissionCheckScope guard(this);
2818     crc32x(rd, rn, rm);
2819   }
Crc32cb(const Register & rd,const Register & rn,const Register & rm)2820   void Crc32cb(const Register& rd,
2821                const Register& rn,
2822                const Register& rm) {
2823     VIXL_ASSERT(allow_macro_instructions_);
2824     SingleEmissionCheckScope guard(this);
2825     crc32cb(rd, rn, rm);
2826   }
Crc32ch(const Register & rd,const Register & rn,const Register & rm)2827   void Crc32ch(const Register& rd,
2828                const Register& rn,
2829                const Register& rm) {
2830     VIXL_ASSERT(allow_macro_instructions_);
2831     SingleEmissionCheckScope guard(this);
2832     crc32ch(rd, rn, rm);
2833   }
Crc32cw(const Register & rd,const Register & rn,const Register & rm)2834   void Crc32cw(const Register& rd,
2835                const Register& rn,
2836                const Register& rm) {
2837     VIXL_ASSERT(allow_macro_instructions_);
2838     SingleEmissionCheckScope guard(this);
2839     crc32cw(rd, rn, rm);
2840   }
Crc32cx(const Register & rd,const Register & rn,const Register & rm)2841   void Crc32cx(const Register& rd,
2842                const Register& rn,
2843                const Register& rm) {
2844     VIXL_ASSERT(allow_macro_instructions_);
2845     SingleEmissionCheckScope guard(this);
2846     crc32cx(rd, rn, rm);
2847   }
2848   // Push the system stack pointer (sp) down to allow the same to be done to
2849   // the current stack pointer (according to StackPointer()). This must be
2850   // called _before_ accessing the memory.
2851   //
2852   // This is necessary when pushing or otherwise adding things to the stack, to
2853   // satisfy the AAPCS64 constraint that the memory below the system stack
2854   // pointer is not accessed.
2855   //
2856   // This method asserts that StackPointer() is not sp, since the call does
2857   // not make sense in that context.
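  //
  // For example (an illustrative sketch only, assuming StackPointer() has
  // been redirected to a register such as x28):
  //   __ BumpSystemStackPointer(32);
  //   __ Str(x0, MemOperand(x28, -32, PreIndex));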
  //
  // TODO: This method can only accept values of 'space' that can be encoded in
  // one instruction. Refer to the implementation for details.
  void BumpSystemStackPointer(const Operand& space);

#ifdef VIXL_DEBUG
  void SetAllowMacroInstructions(bool value) {
    allow_macro_instructions_ = value;
  }

  bool AllowMacroInstructions() const {
    return allow_macro_instructions_;
  }
#endif

  void BlockLiteralPool() { literal_pool_.Block(); }
  void ReleaseLiteralPool() { literal_pool_.Release(); }
  bool IsLiteralPoolBlocked() const { return literal_pool_.IsBlocked(); }
  void BlockVeneerPool() { veneer_pool_.Block(); }
  void ReleaseVeneerPool() { veneer_pool_.Release(); }
  bool IsVeneerPoolBlocked() const { return veneer_pool_.IsBlocked(); }

  size_t LiteralPoolSize() const {
    return literal_pool_.Size();
  }

  size_t LiteralPoolMaxSize() const {
    return literal_pool_.MaxSize();
  }

  size_t VeneerPoolMaxSize() const {
    return veneer_pool_.MaxSize();
  }

  // The number of unresolved branches that may require a veneer.
  int NumberOfPotentialVeneers() const {
    return veneer_pool_.NumberOfPotentialVeneers();
  }

  ptrdiff_t NextCheckPoint() {
    ptrdiff_t next_checkpoint_for_pools = std::min(literal_pool_.checkpoint(),
                                                   veneer_pool_.checkpoint());
    return std::min(next_checkpoint_for_pools, BufferEndOffset());
  }

  void EmitLiteralPool(LiteralPool::EmitOption option) {
    if (!literal_pool_.IsEmpty()) literal_pool_.Emit(option);

    checkpoint_ = NextCheckPoint();
    recommended_checkpoint_ = literal_pool_.NextRecommendedCheckpoint();
  }

  void CheckEmitFor(size_t amount);
  void EnsureEmitFor(size_t amount) {
    ptrdiff_t offset = amount;
    ptrdiff_t max_pools_size = literal_pool_.MaxSize() + veneer_pool_.MaxSize();
    ptrdiff_t cursor = CursorOffset();
    if ((cursor >= recommended_checkpoint_) ||
        ((cursor + offset + max_pools_size) >= checkpoint_)) {
      CheckEmitFor(amount);
    }
  }

  // Set the current stack pointer, but don't generate any code.
  void SetStackPointer(const Register& stack_pointer) {
    VIXL_ASSERT(!TmpList()->IncludesAliasOf(stack_pointer));
    sp_ = stack_pointer;
  }

  // Return the current stack pointer, as set by SetStackPointer.
  const Register& StackPointer() const {
    return sp_;
  }

  CPURegList* TmpList() { return &tmp_list_; }
  CPURegList* FPTmpList() { return &fptmp_list_; }

  // Like printf, but print at run-time from generated code.
  //
  // The caller must ensure that arguments for floating-point placeholders
  // (such as %e, %f or %g) are VRegisters in format 1S or 1D, and that
  // arguments for integer placeholders are Registers.
  //
  // At the moment it is only possible to print the value of sp if it is the
  // current stack pointer. Otherwise, the MacroAssembler will automatically
  // update sp on every push (using BumpSystemStackPointer), so determining its
  // value is difficult.
  //
  // Format placeholders that refer to more than one argument, or to a specific
  // argument, are not supported. This includes formats like "%1$d" or "%.*d".
  //
  // This function automatically preserves caller-saved registers so that
  // calling code can use Printf at any point without having to worry about
  // corruption. The preservation mechanism generates a lot of code. If this is
  // a problem, preserve the important registers manually and then call
  // PrintfNoPreserve. Callee-saved registers are not used by Printf, and are
  // implicitly preserved.
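  //
  // For example (an illustrative sketch; the argument registers shown here
  // are an arbitrary choice):
  //   __ Mov(x0, 42);
  //   __ Fmov(d0, 1.5);
  //   __ Printf("x0: %" PRId64 ", d0: %f\n", x0, d0);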
  void Printf(const char * format,
              CPURegister arg0 = NoCPUReg,
              CPURegister arg1 = NoCPUReg,
              CPURegister arg2 = NoCPUReg,
              CPURegister arg3 = NoCPUReg);

  // Like Printf, but don't preserve any caller-saved registers, not even 'lr'.
  //
  // The return code from the system printf call will be returned in x0.
  void PrintfNoPreserve(const char * format,
                        const CPURegister& arg0 = NoCPUReg,
                        const CPURegister& arg1 = NoCPUReg,
                        const CPURegister& arg2 = NoCPUReg,
                        const CPURegister& arg3 = NoCPUReg);

  // Trace control when running the debug simulator.
  //
  // For example:
  //
  // __ Trace(LOG_REGS, TRACE_ENABLE);
  // This will start logging register values if they are not already being
  // logged.
  //
  // __ Trace(LOG_DISASM, TRACE_DISABLE);
  // This will stop logging disassembly. It has no effect if disassembly was
  // not already being logged.
  void Trace(TraceParameters parameters, TraceCommand command);

  // Log the requested data independently of what is being traced.
  //
  // For example:
  //
  // __ Log(LOG_FLAGS)
  // This will output the current flags.
  void Log(TraceParameters parameters);

  // Enable or disable instrumentation when an Instrument visitor is attached
  // to the simulator.
  void EnableInstrumentation();
  void DisableInstrumentation();

  // Add a marker to the instrumentation data produced by an Instrument
  // visitor. The name is a two-character string that will be attached to the
  // marker in the output data.
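  //
  // For example (illustrative only):
  //   __ AnnotateInstrumentation("OK");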
  void AnnotateInstrumentation(const char* marker_name);

 private:
  // The actual Push and Pop implementations. These don't generate any code
  // other than that required for the push or pop. This allows
  // (Push|Pop)CPURegList to bundle together setup code for a large block of
  // registers.
  //
  // Note that size is per register, and is specified in bytes.
  void PushHelper(int count, int size,
                  const CPURegister& src0, const CPURegister& src1,
                  const CPURegister& src2, const CPURegister& src3);
  void PopHelper(int count, int size,
                 const CPURegister& dst0, const CPURegister& dst1,
                 const CPURegister& dst2, const CPURegister& dst3);

  void Movi16bitHelper(const VRegister& vd, uint64_t imm);
  void Movi32bitHelper(const VRegister& vd, uint64_t imm);
  void Movi64bitHelper(const VRegister& vd, uint64_t imm);

  // Perform necessary maintenance operations before a push or pop.
  //
  // Note that size is per register, and is specified in bytes.
  void PrepareForPush(int count, int size);
  void PrepareForPop(int count, int size);

  // The actual implementation of load and store operations for CPURegList.
  enum LoadStoreCPURegListAction {
    kLoad,
    kStore
  };
  void LoadStoreCPURegListHelper(LoadStoreCPURegListAction operation,
                                 CPURegList registers,
                                 const MemOperand& mem);
  // Returns a MemOperand suitable for loading or storing a CPURegList at
  // `mem`. This helper may allocate registers from `scratch_scope` and
  // generate code to compute an intermediate address. The resulting MemOperand
  // is only valid as long as `scratch_scope` remains valid.
  MemOperand BaseMemOperandForLoadStoreCPURegList(
      const CPURegList& registers,
      const MemOperand& mem,
      UseScratchRegisterScope* scratch_scope);

  bool LabelIsOutOfRange(Label* label, ImmBranchType branch_type) {
    return !Instruction::IsValidImmPCOffset(branch_type,
                                            label->location() - CursorOffset());
  }

#ifdef VIXL_DEBUG
  // Tell whether any of the macro instructions can be used. When false, the
  // MacroAssembler will assert if a method which can emit a variable number
  // of instructions is called.
  bool allow_macro_instructions_;
#endif

  // The register to use as a stack pointer for stack operations.
  Register sp_;

  // Scratch registers available for use by the MacroAssembler.
  CPURegList tmp_list_;
  CPURegList fptmp_list_;

  LiteralPool literal_pool_;
  VeneerPool veneer_pool_;

  ptrdiff_t checkpoint_;
  ptrdiff_t recommended_checkpoint_;

  friend class Pool;
  friend class LiteralPool;
};


inline size_t VeneerPool::OtherPoolsMaxSize() const {
  return masm_->LiteralPoolMaxSize();
}


inline size_t LiteralPool::OtherPoolsMaxSize() const {
  return masm_->VeneerPoolMaxSize();
}


inline void LiteralPool::SetNextRecommendedCheckpoint(ptrdiff_t offset) {
  masm_->recommended_checkpoint_ =
      std::min(masm_->recommended_checkpoint_, offset);
  recommended_checkpoint_ = offset;
}

// Use this scope when you need a one-to-one mapping between methods and
// instructions. This scope prevents the MacroAssembler from being called and
// literal pools from being emitted. It also asserts that the number of
// instructions emitted matches the count you specified when creating the
// scope.
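//
// For example (an illustrative sketch; within the scope only raw assembler
// instructions may be emitted, and exactly the number of instructions given
// to the scope must be emitted):
//
//   {
//     InstructionAccurateScope scope(&masm, 2);
//     __ add(x0, x0, x1);
//     __ sub(x2, x2, x3);
//   }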
class InstructionAccurateScope : public CodeBufferCheckScope {
 public:
  InstructionAccurateScope(MacroAssembler* masm,
                           int count,
                           AssertPolicy policy = kExactSize)
      : CodeBufferCheckScope(masm,
                             (count * kInstructionSize),
                             kCheck,
                             policy) {
    VIXL_ASSERT(policy != kNoAssert);
#ifdef VIXL_DEBUG
    old_allow_macro_instructions_ = masm->AllowMacroInstructions();
    masm->SetAllowMacroInstructions(false);
#endif
  }

  ~InstructionAccurateScope() {
#ifdef VIXL_DEBUG
    MacroAssembler* masm = reinterpret_cast<MacroAssembler*>(assm_);
    masm->SetAllowMacroInstructions(old_allow_macro_instructions_);
#endif
  }

 private:
#ifdef VIXL_DEBUG
  bool old_allow_macro_instructions_;
#endif
};


class BlockLiteralPoolScope {
 public:
  explicit BlockLiteralPoolScope(MacroAssembler* masm) : masm_(masm) {
    masm_->BlockLiteralPool();
  }

  ~BlockLiteralPoolScope() {
    masm_->ReleaseLiteralPool();
  }

 private:
  MacroAssembler* masm_;
};


class BlockVeneerPoolScope {
 public:
  explicit BlockVeneerPoolScope(MacroAssembler* masm) : masm_(masm) {
    masm_->BlockVeneerPool();
  }

  ~BlockVeneerPoolScope() {
    masm_->ReleaseVeneerPool();
  }

 private:
  MacroAssembler* masm_;
};


class BlockPoolsScope {
 public:
  explicit BlockPoolsScope(MacroAssembler* masm) : masm_(masm) {
    masm_->BlockLiteralPool();
    masm_->BlockVeneerPool();
  }

  ~BlockPoolsScope() {
    masm_->ReleaseLiteralPool();
    masm_->ReleaseVeneerPool();
  }

 private:
  MacroAssembler* masm_;
};


// This scope utility allows scratch registers to be managed safely. The
// MacroAssembler's TmpList() (and FPTmpList()) is used as a pool of scratch
// registers. These registers can be allocated on demand, and will be returned
// at the end of the scope.
//
// When the scope ends, the MacroAssembler's lists will be restored to their
// original state, even if the lists were modified by some other means.
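//
// A typical usage sketch (illustrative only):
//
//   {
//     UseScratchRegisterScope temps(&masm);
//     Register scratch = temps.AcquireX();
//     __ Add(scratch, x0, x1);
//   }  // `scratch` is returned to TmpList() here.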
class UseScratchRegisterScope {
 public:
  // This constructor implicitly calls the `Open` function to initialise the
  // scope, so it is ready to use immediately after it has been constructed.
  explicit UseScratchRegisterScope(MacroAssembler* masm);
  // This constructor allows deferred and optional initialisation of the scope.
  // The user is required to explicitly call the `Open` function before using
  // the scope.
  UseScratchRegisterScope();
  // This function performs the actual initialisation work.
  void Open(MacroAssembler* masm);

  // The destructor always implicitly calls the `Close` function.
  ~UseScratchRegisterScope();
  // This function performs the cleaning-up work. It must succeed even if the
  // scope has not been opened. It is safe to call multiple times.
  void Close();


  bool IsAvailable(const CPURegister& reg) const;


  // Take a register from the appropriate temps list. It will be returned
  // automatically when the scope ends.
  Register AcquireW() { return AcquireNextAvailable(available_).W(); }
  Register AcquireX() { return AcquireNextAvailable(available_).X(); }
  VRegister AcquireS() { return AcquireNextAvailable(availablefp_).S(); }
  VRegister AcquireD() { return AcquireNextAvailable(availablefp_).D(); }


  Register AcquireSameSizeAs(const Register& reg);
  VRegister AcquireSameSizeAs(const VRegister& reg);


  // Explicitly release an acquired (or excluded) register, putting it back in
  // the appropriate temps list.
  void Release(const CPURegister& reg);


  // Make the specified registers available as scratch registers for the
  // duration of this scope.
  void Include(const CPURegList& list);
  void Include(const Register& reg1,
               const Register& reg2 = NoReg,
               const Register& reg3 = NoReg,
               const Register& reg4 = NoReg);
  void Include(const VRegister& reg1,
               const VRegister& reg2 = NoVReg,
               const VRegister& reg3 = NoVReg,
               const VRegister& reg4 = NoVReg);


  // Make sure that the specified registers are not available in this scope.
  // This can be used to prevent helper functions from using sensitive
  // registers, for example.
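  //
  // For example (hypothetically), a helper that must not clobber the
  // registers it has been asked to operate on might do:
  //   temps.Exclude(x16, x17);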
  void Exclude(const CPURegList& list);
  void Exclude(const Register& reg1,
               const Register& reg2 = NoReg,
               const Register& reg3 = NoReg,
               const Register& reg4 = NoReg);
  void Exclude(const VRegister& reg1,
               const VRegister& reg2 = NoVReg,
               const VRegister& reg3 = NoVReg,
               const VRegister& reg4 = NoVReg);
  void Exclude(const CPURegister& reg1,
               const CPURegister& reg2 = NoCPUReg,
               const CPURegister& reg3 = NoCPUReg,
               const CPURegister& reg4 = NoCPUReg);


  // Prevent any scratch registers from being used in this scope.
  void ExcludeAll();


 private:
  static CPURegister AcquireNextAvailable(CPURegList* available);

  static void ReleaseByCode(CPURegList* available, int code);

  static void ReleaseByRegList(CPURegList* available,
                               RegList regs);

  static void IncludeByRegList(CPURegList* available,
                               RegList exclude);

  static void ExcludeByRegList(CPURegList* available,
                               RegList exclude);

  // Available scratch registers.
  CPURegList* available_;     // kRegister
  CPURegList* availablefp_;   // kVRegister

  // The state of the available lists at the start of this scope.
  RegList old_available_;     // kRegister
  RegList old_availablefp_;   // kVRegister
#ifdef VIXL_DEBUG
  bool initialised_;
#endif

  // Disallow copy constructor and operator=.
  UseScratchRegisterScope(const UseScratchRegisterScope&) {
    VIXL_UNREACHABLE();
  }
  void operator=(const UseScratchRegisterScope&) {
    VIXL_UNREACHABLE();
  }
};


}  // namespace vixl

#endif  // VIXL_A64_MACRO_ASSEMBLER_A64_H_