1 /*
2  * Copyright (C) 2023 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "assembler_riscv64.h"
18 
19 #include "base/bit_utils.h"
20 #include "base/casts.h"
21 #include "base/logging.h"
22 #include "base/memory_region.h"
23 
24 namespace art HIDDEN {
25 namespace riscv64 {
26 
27 static_assert(static_cast<size_t>(kRiscv64PointerSize) == kRiscv64DoublewordSize,
28               "Unexpected Riscv64 pointer size.");
29 static_assert(kRiscv64PointerSize == PointerSize::k64, "Unexpected Riscv64 pointer size.");
30 
31 // Split 32-bit offset into an `imm20` for LUI/AUIPC and
32 // a signed 12-bit short offset for ADDI/JALR/etc.
SplitOffset(int32_t offset)33 ALWAYS_INLINE static inline std::pair<uint32_t, int32_t> SplitOffset(int32_t offset) {
34   // The highest 0x800 values are out of range.
35   DCHECK_LT(offset, 0x7ffff800);
36   // Round `offset` to nearest 4KiB offset because short offset has range [-0x800, 0x800).
37   int32_t near_offset = (offset + 0x800) & ~0xfff;
38   // Calculate the short offset.
39   int32_t short_offset = offset - near_offset;
40   DCHECK(IsInt<12>(short_offset));
41   // Extract the `imm20`.
42   uint32_t imm20 = static_cast<uint32_t>(near_offset) >> 12;
43   // Return the result as a pair.
44   return std::make_pair(imm20, short_offset);
45 }
46 
ToInt12(uint32_t uint12)47 ALWAYS_INLINE static inline int32_t ToInt12(uint32_t uint12) {
48   DCHECK(IsUint<12>(uint12));
49   return static_cast<int32_t>(uint12 - ((uint12 & 0x800) << 1));
50 }
51 
// Runs the code finalization pipeline exactly once.
// NOTE(review): the step order appears significant (branches are promoted
// before being emitted, after literal/jump-table space is reserved) — confirm
// before reordering.
void Riscv64Assembler::FinalizeCode() {
  CHECK(!finalized_);
  Assembler::FinalizeCode();
  ReserveJumpTableSpace();
  EmitLiterals();
  PromoteBranches();
  EmitBranches();
  EmitJumpTables();
  PatchCFI();
  finalized_ = true;
}
63 
64 /////////////////////////////// RV64 VARIANTS extension ///////////////////////////////
65 
66 //////////////////////////////// RV64 "I" Instructions ////////////////////////////////
67 
68 // LUI/AUIPC (RV32I, with sign-extension on RV64I), opcode = 0x17, 0x37
69 
Lui(XRegister rd,uint32_t imm20)70 void Riscv64Assembler::Lui(XRegister rd, uint32_t imm20) {
71   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
72     int32_t simm = static_cast<int32_t>(imm20);
73     if (rd != Zero && rd != SP && IsImmCLuiEncodable(imm20)) {
74       CLui(rd, imm20);
75       return;
76     }
77   }
78 
79   EmitU(imm20, rd, 0x37);
80 }
81 
// AUIPC: rd = pc + (imm20 << 12); opcode 0x17.
void Riscv64Assembler::Auipc(XRegister rd, uint32_t imm20) {
  EmitU(imm20, rd, 0x17);
}
85 
86 // Jump instructions (RV32I), opcode = 0x67, 0x6f
87 
Jal(XRegister rd,int32_t offset)88 void Riscv64Assembler::Jal(XRegister rd, int32_t offset) {
89   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
90     if (rd == Zero && IsInt<12>(offset)) {
91       CJ(offset);
92       return;
93     }
94     // Note: `c.jal` is RV32-only.
95   }
96 
97   EmitJ(offset, rd, 0x6F);
98 }
99 
Jalr(XRegister rd,XRegister rs1,int32_t offset)100 void Riscv64Assembler::Jalr(XRegister rd, XRegister rs1, int32_t offset) {
101   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
102     if (rd == RA && rs1 != Zero && offset == 0) {
103       CJalr(rs1);
104       return;
105     } else if (rd == Zero && rs1 != Zero && offset == 0) {
106       CJr(rs1);
107       return;
108     }
109   }
110 
111   EmitI(offset, rs1, 0x0, rd, 0x67);
112 }
113 
114 // Branch instructions, opcode = 0x63 (subfunc from 0x0 ~ 0x7), 0x67, 0x6f
115 
Beq(XRegister rs1,XRegister rs2,int32_t offset)116 void Riscv64Assembler::Beq(XRegister rs1, XRegister rs2, int32_t offset) {
117   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
118     if (rs2 == Zero && IsShortReg(rs1) && IsInt<9>(offset)) {
119       CBeqz(rs1, offset);
120       return;
121     } else if (rs1 == Zero && IsShortReg(rs2) && IsInt<9>(offset)) {
122       CBeqz(rs2, offset);
123       return;
124     }
125   }
126 
127   EmitB(offset, rs2, rs1, 0x0, 0x63);
128 }
129 
Bne(XRegister rs1,XRegister rs2,int32_t offset)130 void Riscv64Assembler::Bne(XRegister rs1, XRegister rs2, int32_t offset) {
131   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
132     if (rs2 == Zero && IsShortReg(rs1) && IsInt<9>(offset)) {
133       CBnez(rs1, offset);
134       return;
135     } else if (rs1 == Zero && IsShortReg(rs2) && IsInt<9>(offset)) {
136       CBnez(rs2, offset);
137       return;
138     }
139   }
140 
141   EmitB(offset, rs2, rs1, 0x1, 0x63);
142 }
143 
// BLT: branch if rs1 < rs2 (signed); funct3 = 0x4.
void Riscv64Assembler::Blt(XRegister rs1, XRegister rs2, int32_t offset) {
  EmitB(offset, rs2, rs1, 0x4, 0x63);
}
147 
// BGE: branch if rs1 >= rs2 (signed); funct3 = 0x5.
void Riscv64Assembler::Bge(XRegister rs1, XRegister rs2, int32_t offset) {
  EmitB(offset, rs2, rs1, 0x5, 0x63);
}
151 
// BLTU: branch if rs1 < rs2 (unsigned); funct3 = 0x6.
void Riscv64Assembler::Bltu(XRegister rs1, XRegister rs2, int32_t offset) {
  EmitB(offset, rs2, rs1, 0x6, 0x63);
}
155 
// BGEU: branch if rs1 >= rs2 (unsigned); funct3 = 0x7.
void Riscv64Assembler::Bgeu(XRegister rs1, XRegister rs2, int32_t offset) {
  EmitB(offset, rs2, rs1, 0x7, 0x63);
}
159 
160 // Load instructions (RV32I+RV64I): opcode = 0x03, funct3 from 0x0 ~ 0x6
161 
// LB: load sign-extended byte from `rs1 + offset` into `rd`; funct3 = 0x0.
void Riscv64Assembler::Lb(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
  EmitI(offset, rs1, 0x0, rd, 0x03);
}
166 
Lh(XRegister rd,XRegister rs1,int32_t offset)167 void Riscv64Assembler::Lh(XRegister rd, XRegister rs1, int32_t offset) {
168   AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
169 
170   if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
171     if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<2>(offset) && IsAligned<2>(offset)) {
172       CLh(rd, rs1, offset);
173       return;
174     }
175   }
176 
177   EmitI(offset, rs1, 0x1, rd, 0x03);
178 }
179 
// LW: load sign-extended word from `rs1 + offset` into `rd` (funct3 = 0x2).
// Compresses to C.LWSP (SP-relative) or C.LW (short regs) under Zca.
void Riscv64Assembler::Lw(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd != Zero && rs1 == SP && IsUint<8>(offset) && IsAligned<4>(offset)) {
      CLwsp(rd, offset);
      return;
    } else if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<7>(offset) && IsAligned<4>(offset)) {
      CLw(rd, rs1, offset);
      return;
    }
  }

  EmitI(offset, rs1, 0x2, rd, 0x03);
}
195 
// LD: load doubleword from `rs1 + offset` into `rd` (funct3 = 0x3, RV64I).
// Compresses to C.LDSP (SP-relative) or C.LD (short regs) under Zca.
void Riscv64Assembler::Ld(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd != Zero && rs1 == SP && IsUint<9>(offset) && IsAligned<8>(offset)) {
      CLdsp(rd, offset);
      return;
    } else if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<8>(offset) && IsAligned<8>(offset)) {
      CLd(rd, rs1, offset);
      return;
    }
  }

  EmitI(offset, rs1, 0x3, rd, 0x03);
}
211 
Lbu(XRegister rd,XRegister rs1,int32_t offset)212 void Riscv64Assembler::Lbu(XRegister rd, XRegister rs1, int32_t offset) {
213   AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
214 
215   if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
216     if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<2>(offset)) {
217       CLbu(rd, rs1, offset);
218       return;
219     }
220   }
221 
222   EmitI(offset, rs1, 0x4, rd, 0x03);
223 }
224 
// LHU: load zero-extended halfword from `rs1 + offset` into `rd` (funct3 = 0x5).
// Compresses to C.LHU (Zcb) for short registers and a small 2-aligned offset.
void Riscv64Assembler::Lhu(XRegister rd, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
    if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<2>(offset) && IsAligned<2>(offset)) {
      CLhu(rd, rs1, offset);
      return;
    }
  }

  EmitI(offset, rs1, 0x5, rd, 0x03);
}
237 
Lwu(XRegister rd,XRegister rs1,int32_t offset)238 void Riscv64Assembler::Lwu(XRegister rd, XRegister rs1, int32_t offset) {
239   AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
240   EmitI(offset, rs1, 0x6, rd, 0x3);
241 }
242 
243 // Store instructions (RV32I+RV64I): opcode = 0x23, funct3 from 0x0 ~ 0x3
244 
Sb(XRegister rs2,XRegister rs1,int32_t offset)245 void Riscv64Assembler::Sb(XRegister rs2, XRegister rs1, int32_t offset) {
246   AssertExtensionsEnabled(Riscv64Extension::kLoadStore);
247 
248   if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
249     if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<2>(offset)) {
250       CSb(rs2, rs1, offset);
251       return;
252     }
253   }
254 
255   EmitS(offset, rs2, rs1, 0x0, 0x23);
256 }
257 
// SH: store halfword `rs2` to `rs1 + offset` (funct3 = 0x1).
// Compresses to C.SH (Zcb) for short registers and a small 2-aligned offset.
void Riscv64Assembler::Sh(XRegister rs2, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
    if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<2>(offset) && IsAligned<2>(offset)) {
      CSh(rs2, rs1, offset);
      return;
    }
  }

  EmitS(offset, rs2, rs1, 0x1, 0x23);
}
270 
// SW: store word `rs2` to `rs1 + offset` (funct3 = 0x2).
// Compresses to C.SWSP (SP-relative) or C.SW (short regs) under Zca.
void Riscv64Assembler::Sw(XRegister rs2, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rs1 == SP && IsUint<8>(offset) && IsAligned<4>(offset)) {
      CSwsp(rs2, offset);
      return;
    } else if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<7>(offset) && IsAligned<4>(offset)) {
      CSw(rs2, rs1, offset);
      return;
    }
  }

  EmitS(offset, rs2, rs1, 0x2, 0x23);
}
286 
// SD: store doubleword `rs2` to `rs1 + offset` (funct3 = 0x3, RV64I).
// Compresses to C.SDSP (SP-relative) or C.SD (short regs) under Zca.
void Riscv64Assembler::Sd(XRegister rs2, XRegister rs1, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rs1 == SP && IsUint<9>(offset) && IsAligned<8>(offset)) {
      CSdsp(rs2, offset);
      return;
    } else if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<8>(offset) && IsAligned<8>(offset)) {
      CSd(rs2, rs1, offset);
      return;
    }
  }

  EmitS(offset, rs2, rs1, 0x3, 0x23);
}
302 
303 // IMM ALU instructions (RV32I): opcode = 0x13, funct3 from 0x0 ~ 0x7
304 
// ADDI: rd = rs1 + imm12. Under Zca this selects among several compressed
// forms depending on the operands: C.LI, C.ADDI, C.ADDI16SP, C.ADDI4SPN,
// C.MV, or C.NOP.
void Riscv64Assembler::Addi(XRegister rd, XRegister rs1, int32_t imm12) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd != Zero) {
      if (rs1 == Zero && IsInt<6>(imm12)) {
        // Small immediate into a register: load-immediate form.
        CLi(rd, imm12);
        return;
      } else if (imm12 != 0) {
        if (rd == rs1) {
          // We're testing against clang's assembler and therefore
          // if both c.addi and c.addi16sp are viable, we use the c.addi just like clang.
          if (IsInt<6>(imm12)) {
            CAddi(rd, imm12);
            return;
          } else if (rd == SP && IsInt<10>(imm12) && IsAligned<16>(imm12)) {
            CAddi16Sp(imm12);
            return;
          }
        } else if (IsShortReg(rd) && rs1 == SP && IsUint<10>(imm12) && IsAligned<4>(imm12)) {
          CAddi4Spn(rd, imm12);
          return;
        }
      } else if (rs1 != Zero) {
        // imm12 == 0: plain register move.
        CMv(rd, rs1);
        return;
      }
    } else if (rd == rs1 && imm12 == 0) {
      // `addi zero, zero, 0` is the canonical NOP.
      CNop();
      return;
    }
  }

  EmitI(imm12, rs1, 0x0, rd, 0x13);
}
338 
// SLTI: rd = (rs1 < imm12) ? 1 : 0, signed compare; funct3 = 0x2.
void Riscv64Assembler::Slti(XRegister rd, XRegister rs1, int32_t imm12) {
  EmitI(imm12, rs1, 0x2, rd, 0x13);
}
342 
// SLTIU: rd = (rs1 < imm12) ? 1 : 0, unsigned compare; funct3 = 0x3.
void Riscv64Assembler::Sltiu(XRegister rd, XRegister rs1, int32_t imm12) {
  EmitI(imm12, rs1, 0x3, rd, 0x13);
}
346 
Xori(XRegister rd,XRegister rs1,int32_t imm12)347 void Riscv64Assembler::Xori(XRegister rd, XRegister rs1, int32_t imm12) {
348   if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
349     if (rd == rs1 && IsShortReg(rd) && imm12 == -1) {
350       CNot(rd);
351       return;
352     }
353   }
354 
355   EmitI(imm12, rs1, 0x4, rd, 0x13);
356 }
357 
// ORI: rd = rs1 | imm12; funct3 = 0x6.
void Riscv64Assembler::Ori(XRegister rd, XRegister rs1, int32_t imm12) {
  EmitI(imm12, rs1, 0x6, rd, 0x13);
}
361 
Andi(XRegister rd,XRegister rs1,int32_t imm12)362 void Riscv64Assembler::Andi(XRegister rd, XRegister rs1, int32_t imm12) {
363   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
364     if (rd == rs1 && IsShortReg(rd) && IsInt<6>(imm12)) {
365       CAndi(rd, imm12);
366       return;
367     }
368   }
369 
370   EmitI(imm12, rs1, 0x7, rd, 0x13);
371 }
372 
373 // 0x1 Split: 0x0(6b) + imm12(6b)
// SLLI: rd = rs1 << shamt (shamt in [0, 64) on RV64).
// Compresses to C.SLLI under Zca for an in-place nonzero shift of a
// non-zero register.
void Riscv64Assembler::Slli(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd == rs1 && rd != Zero && shamt != 0) {
      CSlli(rd, shamt);
      return;
    }
  }

  EmitI6(0x0, shamt, rs1, 0x1, rd, 0x13);
}
386 
387 // 0x5 Split: 0x0(6b) + imm12(6b)
// SRLI: rd = rs1 >> shamt (logical, shamt in [0, 64)).
// Compresses to C.SRLI under Zca for an in-place nonzero shift of a short register.
void Riscv64Assembler::Srli(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd == rs1 && IsShortReg(rd) && shamt != 0) {
      CSrli(rd, shamt);
      return;
    }
  }

  EmitI6(0x0, shamt, rs1, 0x5, rd, 0x13);
}
400 
401 // 0x5 Split: 0x10(6b) + imm12(6b)
// SRAI: rd = rs1 >> shamt (arithmetic, shamt in [0, 64); funct6 = 0x10).
// Compresses to C.SRAI under Zca for an in-place nonzero shift of a short register.
void Riscv64Assembler::Srai(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);

  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd == rs1 && IsShortReg(rd) && shamt != 0) {
      CSrai(rd, shamt);
      return;
    }
  }

  EmitI6(0x10, shamt, rs1, 0x5, rd, 0x13);
}
414 
415 // ALU instructions (RV32I): opcode = 0x33, funct3 from 0x0 ~ 0x7
416 
// ADD: rd = rs1 + rs2. Under Zca a zero operand makes this a move (C.MV) and
// an operand aliasing `rd` allows the in-place C.ADD (ADD is commutative).
void Riscv64Assembler::Add(XRegister rd, XRegister rs1, XRegister rs2) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd != Zero) {
      if (rs1 != Zero || rs2 != Zero) {
        if (rs1 == Zero) {
          DCHECK_NE(rs2, Zero);
          CMv(rd, rs2);
          return;
        } else if (rs2 == Zero) {
          DCHECK_NE(rs1, Zero);
          CMv(rd, rs1);
          return;
        } else if (rd == rs1) {
          DCHECK_NE(rs2, Zero);
          CAdd(rd, rs2);
          return;
        } else if (rd == rs2) {
          DCHECK_NE(rs1, Zero);
          CAdd(rd, rs1);
          return;
        }
      } else {
        // TODO: we use clang for testing assembler and unfortunately it (clang 18.0.1) does not
        // support conversion from 'add rd, Zero, Zero' into 'c.li. rd, 0' so once clang supports it
        // the lines below should be uncommented

        // CLi(rd, 0);
        // return;
      }
    }
  }

  EmitR(0x0, rs2, rs1, 0x0, rd, 0x33);
}
451 
Sub(XRegister rd,XRegister rs1,XRegister rs2)452 void Riscv64Assembler::Sub(XRegister rd, XRegister rs1, XRegister rs2) {
453   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
454     if (rd == rs1 && IsShortReg(rd) && IsShortReg(rs2)) {
455       CSub(rd, rs2);
456       return;
457     }
458   }
459 
460   EmitR(0x20, rs2, rs1, 0x0, rd, 0x33);
461 }
462 
// SLT: rd = (rs1 < rs2) ? 1 : 0, signed compare; funct3 = 0x2.
void Riscv64Assembler::Slt(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x02, rd, 0x33);
}
466 
// SLTU: rd = (rs1 < rs2) ? 1 : 0, unsigned compare; funct3 = 0x3.
void Riscv64Assembler::Sltu(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x03, rd, 0x33);
}
470 
Xor(XRegister rd,XRegister rs1,XRegister rs2)471 void Riscv64Assembler::Xor(XRegister rd, XRegister rs1, XRegister rs2) {
472   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
473     if (IsShortReg(rd)) {
474       if (rd == rs1 && IsShortReg(rs2)) {
475         CXor(rd, rs2);
476         return;
477       } else if (rd == rs2 && IsShortReg(rs1)) {
478         CXor(rd, rs1);
479         return;
480       }
481     }
482   }
483 
484   EmitR(0x0, rs2, rs1, 0x04, rd, 0x33);
485 }
486 
// OR: rd = rs1 | rs2 (funct3 = 0x6).
// Commutative, so either source aliasing a short `rd` allows C.OR (Zca).
void Riscv64Assembler::Or(XRegister rd, XRegister rs1, XRegister rs2) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (IsShortReg(rd)) {
      if (rd == rs1 && IsShortReg(rs2)) {
        COr(rd, rs2);
        return;
      } else if (rd == rs2 && IsShortReg(rs1)) {
        COr(rd, rs1);
        return;
      }
    }
  }

  EmitR(0x0, rs2, rs1, 0x06, rd, 0x33);
}
502 
// AND: rd = rs1 & rs2 (funct3 = 0x7).
// Commutative, so either source aliasing a short `rd` allows C.AND (Zca).
void Riscv64Assembler::And(XRegister rd, XRegister rs1, XRegister rs2) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (IsShortReg(rd)) {
      if (rd == rs1 && IsShortReg(rs2)) {
        CAnd(rd, rs2);
        return;
      } else if (rd == rs2 && IsShortReg(rs1)) {
        CAnd(rd, rs1);
        return;
      }
    }
  }

  EmitR(0x0, rs2, rs1, 0x07, rd, 0x33);
}
518 
// SLL: rd = rs1 << rs2 (logical left shift); funct3 = 0x1.
void Riscv64Assembler::Sll(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x01, rd, 0x33);
}
522 
// SRL: rd = rs1 >> rs2 (logical right shift); funct3 = 0x5.
void Riscv64Assembler::Srl(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x05, rd, 0x33);
}
526 
// SRA: rd = rs1 >> rs2 (arithmetic right shift); funct7 = 0x20.
void Riscv64Assembler::Sra(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x20, rs2, rs1, 0x05, rd, 0x33);
}
530 
531 // 32bit Imm ALU instructions (RV64I): opcode = 0x1b, funct3 from 0x0, 0x1, 0x5
532 
// ADDIW: 32-bit add-immediate with sign-extension of the result (RV64I).
// Under Zca, a 6-bit immediate compresses to C.ADDIW (in-place) or C.LI
// (when the source is Zero).
void Riscv64Assembler::Addiw(XRegister rd, XRegister rs1, int32_t imm12) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (rd != Zero && IsInt<6>(imm12)) {
      if (rd == rs1) {
        CAddiw(rd, imm12);
        return;
      } else if (rs1 == Zero) {
        CLi(rd, imm12);
        return;
      }
    }
  }

  EmitI(imm12, rs1, 0x0, rd, 0x1b);
}
548 
// SLLIW: 32-bit logical left shift by immediate (shamt in [0, 32), RV64I).
void Riscv64Assembler::Slliw(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 32u);
  EmitR(0x0, shamt, rs1, 0x1, rd, 0x1b);
}
553 
// SRLIW: 32-bit logical right shift by immediate (shamt in [0, 32), RV64I).
void Riscv64Assembler::Srliw(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 32u);
  EmitR(0x0, shamt, rs1, 0x5, rd, 0x1b);
}
558 
// SRAIW: 32-bit arithmetic right shift by immediate (shamt in [0, 32), RV64I).
void Riscv64Assembler::Sraiw(XRegister rd, XRegister rs1, int32_t shamt) {
  CHECK_LT(static_cast<uint32_t>(shamt), 32u);
  EmitR(0x20, shamt, rs1, 0x5, rd, 0x1b);
}
563 
564 // 32bit ALU instructions (RV64I): opcode = 0x3b, funct3 from 0x0 ~ 0x7
565 
// ADDW: 32-bit add with sign-extension of the result (RV64I).
// Commutative, so either source aliasing a short `rd` allows C.ADDW (Zca).
void Riscv64Assembler::Addw(XRegister rd, XRegister rs1, XRegister rs2) {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    if (IsShortReg(rd)) {
      if (rd == rs1 && IsShortReg(rs2)) {
        CAddw(rd, rs2);
        return;
      } else if (rd == rs2 && IsShortReg(rs1)) {
        CAddw(rd, rs1);
        return;
      }
    }
  }

  EmitR(0x0, rs2, rs1, 0x0, rd, 0x3b);
}
581 
Subw(XRegister rd,XRegister rs1,XRegister rs2)582 void Riscv64Assembler::Subw(XRegister rd, XRegister rs1, XRegister rs2) {
583   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
584     if (rd == rs1 && IsShortReg(rd) && IsShortReg(rs2)) {
585       CSubw(rd, rs2);
586       return;
587     }
588   }
589 
590   EmitR(0x20, rs2, rs1, 0x0, rd, 0x3b);
591 }
592 
// SLLW: 32-bit logical left shift by register (RV64I).
void Riscv64Assembler::Sllw(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x1, rd, 0x3b);
}
596 
// SRLW: 32-bit logical right shift by register (RV64I).
void Riscv64Assembler::Srlw(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x0, rs2, rs1, 0x5, rd, 0x3b);
}
600 
// SRAW: 32-bit arithmetic right shift by register (RV64I); funct7 = 0x20.
void Riscv64Assembler::Sraw(XRegister rd, XRegister rs1, XRegister rs2) {
  EmitR(0x20, rs2, rs1, 0x5, rd, 0x3b);
}
604 
605 // Environment call and breakpoint (RV32I), opcode = 0x73
606 
// ECALL: environment call (imm = 0, opcode 0x73).
void Riscv64Assembler::Ecall() { EmitI(0x0, 0x0, 0x0, 0x0, 0x73); }
608 
Ebreak()609 void Riscv64Assembler::Ebreak() {
610   if (IsExtensionEnabled(Riscv64Extension::kZca)) {
611     CEbreak();
612     return;
613   }
614 
615   EmitI(0x1, 0x0, 0x0, 0x0, 0x73);
616 }
617 
618 // Fence instruction (RV32I): opcode = 0xf, funct3 = 0
619 
// FENCE: memory ordering barrier with 4-bit predecessor/successor sets
// (fm = 0x0 selects a normal fence).
void Riscv64Assembler::Fence(uint32_t pred, uint32_t succ) {
  DCHECK(IsUint<4>(pred));
  DCHECK(IsUint<4>(succ));
  EmitI(/* normal fence */ 0x0 << 8 | pred << 4 | succ, 0x0, 0x0, 0x0, 0xf);
}
625 
// FENCE.TSO: fence with fm = 0x8 and RW predecessor/successor sets.
void Riscv64Assembler::FenceTso() {
  static constexpr uint32_t kPred = kFenceWrite | kFenceRead;
  static constexpr uint32_t kSucc = kFenceWrite | kFenceRead;
  // ToInt12() sign-extends the 12-bit immediate since bit 11 (fm) is set.
  EmitI(ToInt12(/* TSO fence */ 0x8 << 8 | kPred << 4 | kSucc), 0x0, 0x0, 0x0, 0xf);
}
631 
632 //////////////////////////////// RV64 "I" Instructions  END ////////////////////////////////
633 
634 /////////////////////////// RV64 "Zifencei" Instructions  START ////////////////////////////
635 
636 // "Zifencei" Standard Extension, opcode = 0xf, funct3 = 1
// FENCE.I: instruction-fetch fence ("Zifencei"); opcode 0xf, funct3 = 0x1.
void Riscv64Assembler::FenceI() {
  AssertExtensionsEnabled(Riscv64Extension::kZifencei);
  EmitI(0x0, 0x0, 0x1, 0x0, 0xf);
}
641 
642 //////////////////////////// RV64 "Zifencei" Instructions  END /////////////////////////////
643 
644 /////////////////////////////// RV64 "M" Instructions  START ///////////////////////////////
645 
646 // RV32M Standard Extension: opcode = 0x33, funct3 from 0x0 ~ 0x7
647 
Mul(XRegister rd,XRegister rs1,XRegister rs2)648 void Riscv64Assembler::Mul(XRegister rd, XRegister rs1, XRegister rs2) {
649   AssertExtensionsEnabled(Riscv64Extension::kM);
650 
651   if (IsExtensionEnabled(Riscv64Extension::kZcb)) {
652     if (IsShortReg(rd)) {
653       if (rd == rs1 && IsShortReg(rs2)) {
654         CMul(rd, rs2);
655         return;
656       } else if (rd == rs2 && IsShortReg(rs1)) {
657         CMul(rd, rs1);
658         return;
659       }
660     }
661   }
662 
663   EmitR(0x1, rs2, rs1, 0x0, rd, 0x33);
664 }
665 
// MULH: high bits of signed x signed multiply; funct3 = 0x1.
void Riscv64Assembler::Mulh(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x1, rd, 0x33);
}
670 
// MULHSU: high bits of signed x unsigned multiply; funct3 = 0x2.
void Riscv64Assembler::Mulhsu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x2, rd, 0x33);
}
675 
// MULHU: high bits of unsigned x unsigned multiply; funct3 = 0x3.
void Riscv64Assembler::Mulhu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x3, rd, 0x33);
}
680 
// DIV: signed division; funct3 = 0x4.
void Riscv64Assembler::Div(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x4, rd, 0x33);
}
685 
// DIVU: unsigned division; funct3 = 0x5.
void Riscv64Assembler::Divu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x5, rd, 0x33);
}
690 
// REM: signed remainder; funct3 = 0x6.
void Riscv64Assembler::Rem(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x6, rd, 0x33);
}
695 
// REMU: unsigned remainder; funct3 = 0x7.
void Riscv64Assembler::Remu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x7, rd, 0x33);
}
700 
701 // RV64M Standard Extension: opcode = 0x3b, funct3 0x0 and from 0x4 ~ 0x7
702 
// MULW: 32-bit multiply with sign-extension of the result (RV64M).
void Riscv64Assembler::Mulw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x0, rd, 0x3b);
}
707 
// DIVW: 32-bit signed division (RV64M); funct3 = 0x4.
void Riscv64Assembler::Divw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x4, rd, 0x3b);
}
712 
// DIVUW: 32-bit unsigned division (RV64M); funct3 = 0x5.
void Riscv64Assembler::Divuw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x5, rd, 0x3b);
}
717 
// REMW: 32-bit signed remainder (RV64M); funct3 = 0x6.
void Riscv64Assembler::Remw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x6, rd, 0x3b);
}
722 
// REMUW: 32-bit unsigned remainder (RV64M); funct3 = 0x7.
void Riscv64Assembler::Remuw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kM);
  EmitR(0x1, rs2, rs1, 0x7, rd, 0x3b);
}
727 
728 //////////////////////////////// RV64 "M" Instructions  END ////////////////////////////////
729 
730 /////////////////////////////// RV64 "A" Instructions  START ///////////////////////////////
731 
// LR.W: load-reserved word. `aqrl == kRelease` alone is disallowed (a
// release-only load reservation is rejected here).
void Riscv64Assembler::LrW(XRegister rd, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  CHECK(aqrl != AqRl::kRelease);
  EmitR4(0x2, enum_cast<uint32_t>(aqrl), 0x0, rs1, 0x2, rd, 0x2f);
}
737 
// LR.D: load-reserved doubleword; release-only ordering is rejected.
void Riscv64Assembler::LrD(XRegister rd, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  CHECK(aqrl != AqRl::kRelease);
  EmitR4(0x2, enum_cast<uint32_t>(aqrl), 0x0, rs1, 0x3, rd, 0x2f);
}
743 
// SC.W: store-conditional word; acquire-only ordering is rejected.
void Riscv64Assembler::ScW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  CHECK(aqrl != AqRl::kAcquire);
  EmitR4(0x3, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}
749 
// SC.D: store-conditional doubleword; acquire-only ordering is rejected.
void Riscv64Assembler::ScD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  CHECK(aqrl != AqRl::kAcquire);
  EmitR4(0x3, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
755 
// AMOSWAP.W: atomic swap, word (funct5 = 0x1).
void Riscv64Assembler::AmoSwapW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x1, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}
760 
// AMOSWAP.D: atomic swap, doubleword (funct5 = 0x1).
void Riscv64Assembler::AmoSwapD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x1, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
765 
// AMOADD.W: atomic add, word (funct5 = 0x0).
void Riscv64Assembler::AmoAddW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x0, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}
770 
// AMOADD.D: atomic add, doubleword (funct5 = 0x0).
void Riscv64Assembler::AmoAddD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x0, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
775 
// AMOXOR.W: atomic XOR, word (funct5 = 0x4).
void Riscv64Assembler::AmoXorW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x4, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}
780 
// AMOXOR.D: atomic XOR, doubleword (funct5 = 0x4).
void Riscv64Assembler::AmoXorD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x4, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
785 
// AMOAND.W: atomic AND, word (funct5 = 0xc).
void Riscv64Assembler::AmoAndW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0xc, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}
790 
// AMOAND.D: atomic AND, doubleword (funct5 = 0xc).
void Riscv64Assembler::AmoAndD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0xc, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
795 
// AMOOR.W: atomic OR, word (funct5 = 0x8).
void Riscv64Assembler::AmoOrW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x8, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}
800 
// AMOOR.D: atomic OR, doubleword (funct5 = 0x8).
void Riscv64Assembler::AmoOrD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x8, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
805 
// AMOMIN.W: atomic signed minimum, word (funct5 = 0x10).
void Riscv64Assembler::AmoMinW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x10, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}
810 
// AMOMIN.D: atomic signed minimum, doubleword (funct5 = 0x10).
void Riscv64Assembler::AmoMinD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x10, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
815 
// AMOMAX.W: atomic signed maximum, word (funct5 = 0x14).
void Riscv64Assembler::AmoMaxW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x14, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
}
820 
// AMOMAX.D: atomic signed maximum, doubleword (funct5 = 0x14).
void Riscv64Assembler::AmoMaxD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
  AssertExtensionsEnabled(Riscv64Extension::kA);
  EmitR4(0x14, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
}
825 
AmoMinuW(XRegister rd,XRegister rs2,XRegister rs1,AqRl aqrl)826 void Riscv64Assembler::AmoMinuW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
827   AssertExtensionsEnabled(Riscv64Extension::kA);
828   EmitR4(0x18, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
829 }
830 
AmoMinuD(XRegister rd,XRegister rs2,XRegister rs1,AqRl aqrl)831 void Riscv64Assembler::AmoMinuD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
832   AssertExtensionsEnabled(Riscv64Extension::kA);
833   EmitR4(0x18, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
834 }
835 
AmoMaxuW(XRegister rd,XRegister rs2,XRegister rs1,AqRl aqrl)836 void Riscv64Assembler::AmoMaxuW(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
837   AssertExtensionsEnabled(Riscv64Extension::kA);
838   EmitR4(0x1c, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x2, rd, 0x2f);
839 }
840 
AmoMaxuD(XRegister rd,XRegister rs2,XRegister rs1,AqRl aqrl)841 void Riscv64Assembler::AmoMaxuD(XRegister rd, XRegister rs2, XRegister rs1, AqRl aqrl) {
842   AssertExtensionsEnabled(Riscv64Extension::kA);
843   EmitR4(0x1c, enum_cast<uint32_t>(aqrl), rs2, rs1, 0x3, rd, 0x2f);
844 }
845 
846 /////////////////////////////// RV64 "A" Instructions  END ///////////////////////////////
847 
848 ///////////////////////////// RV64 "Zicsr" Instructions  START /////////////////////////////
849 
850 // "Zicsr" Standard Extension, opcode = 0x73, funct3 from 0x1 ~ 0x3 and 0x5 ~ 0x7
851 
// CSRRW: atomic read/write CSR; the CSR number travels in the I-type immediate field.
Csrrw(XRegister rd,uint32_t csr,XRegister rs1)852 void Riscv64Assembler::Csrrw(XRegister rd, uint32_t csr, XRegister rs1) {
853   AssertExtensionsEnabled(Riscv64Extension::kZicsr);
854   EmitI(ToInt12(csr), rs1, 0x1, rd, 0x73);
855 }
856 
// CSRRS: atomic read and set CSR bits (rs1 is the set mask).
Csrrs(XRegister rd,uint32_t csr,XRegister rs1)857 void Riscv64Assembler::Csrrs(XRegister rd, uint32_t csr, XRegister rs1) {
858   AssertExtensionsEnabled(Riscv64Extension::kZicsr);
859   EmitI(ToInt12(csr), rs1, 0x2, rd, 0x73);
860 }
861 
// CSRRC: atomic read and clear CSR bits (rs1 is the clear mask).
Csrrc(XRegister rd,uint32_t csr,XRegister rs1)862 void Riscv64Assembler::Csrrc(XRegister rd, uint32_t csr, XRegister rs1) {
863   AssertExtensionsEnabled(Riscv64Extension::kZicsr);
864   EmitI(ToInt12(csr), rs1, 0x3, rd, 0x73);
865 }
866 
// CSRRWI: immediate form of CSRRW; `uimm5` occupies the rs1 field.
Csrrwi(XRegister rd,uint32_t csr,uint32_t uimm5)867 void Riscv64Assembler::Csrrwi(XRegister rd, uint32_t csr, uint32_t uimm5) {
868   AssertExtensionsEnabled(Riscv64Extension::kZicsr);
869   EmitI(ToInt12(csr), uimm5, 0x5, rd, 0x73);
870 }
871 
// CSRRSI: immediate form of CSRRS.
Csrrsi(XRegister rd,uint32_t csr,uint32_t uimm5)872 void Riscv64Assembler::Csrrsi(XRegister rd, uint32_t csr, uint32_t uimm5) {
873   AssertExtensionsEnabled(Riscv64Extension::kZicsr);
874   EmitI(ToInt12(csr), uimm5, 0x6, rd, 0x73);
875 }
876 
// CSRRCI: immediate form of CSRRC.
Csrrci(XRegister rd,uint32_t csr,uint32_t uimm5)877 void Riscv64Assembler::Csrrci(XRegister rd, uint32_t csr, uint32_t uimm5) {
878   AssertExtensionsEnabled(Riscv64Extension::kZicsr);
879   EmitI(ToInt12(csr), uimm5, 0x7, rd, 0x73);
880 }
881 
882 ////////////////////////////// RV64 "Zicsr" Instructions  END //////////////////////////////
883 
884 /////////////////////////////// RV64 "FD" Instructions  START ///////////////////////////////
885 
886 // FP load/store instructions (RV32F+RV32D): opcode = 0x07, 0x27
887 
// FLW: load 32-bit FP value from memory at rs1 + offset.
FLw(FRegister rd,XRegister rs1,int32_t offset)888 void Riscv64Assembler::FLw(FRegister rd, XRegister rs1, int32_t offset) {
889   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kF);
890   EmitI(offset, rs1, 0x2, rd, 0x07);
891 }
892 
// FLD: load 64-bit FP value. Prefers the compressed encodings
// (C.FLDSP / C.FLD) when "Zcd" is enabled and the operands fit.
FLd(FRegister rd,XRegister rs1,int32_t offset)893 void Riscv64Assembler::FLd(FRegister rd, XRegister rs1, int32_t offset) {
894   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kD);
895 
896   if (IsExtensionEnabled(Riscv64Extension::kZcd)) {
897     if (rs1 == SP && IsUint<9>(offset) && IsAligned<8>(offset)) {
898       CFLdsp(rd, offset);
899       return;
900     } else if (IsShortReg(rd) && IsShortReg(rs1) && IsUint<8>(offset) && IsAligned<8>(offset)) {
901       CFLd(rd, rs1, offset);
902       return;
903     }
904   }
905 
906   EmitI(offset, rs1, 0x3, rd, 0x07);
907 }
908 
// FSW: store 32-bit FP value to memory at rs1 + offset.
FSw(FRegister rs2,XRegister rs1,int32_t offset)909 void Riscv64Assembler::FSw(FRegister rs2, XRegister rs1, int32_t offset) {
910   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kF);
911   EmitS(offset, rs2, rs1, 0x2, 0x27);
912 }
913 
// FSD: store 64-bit FP value. Prefers the compressed encodings
// (C.FSDSP / C.FSD) when "Zcd" is enabled and the operands fit.
FSd(FRegister rs2,XRegister rs1,int32_t offset)914 void Riscv64Assembler::FSd(FRegister rs2, XRegister rs1, int32_t offset) {
915   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kD);
916 
917   if (IsExtensionEnabled(Riscv64Extension::kZcd)) {
918     if (rs1 == SP && IsUint<9>(offset) && IsAligned<8>(offset)) {
919       CFSdsp(rs2, offset);
920       return;
921     } else if (IsShortReg(rs2) && IsShortReg(rs1) && IsUint<8>(offset) && IsAligned<8>(offset)) {
922       CFSd(rs2, rs1, offset);
923       return;
924     }
925   }
926 
927   EmitS(offset, rs2, rs1, 0x3, 0x27);
928 }
929 
930 // FP FMA instructions (RV32F+RV32D): opcode = 0x43, 0x47, 0x4b, 0x4f
931 
// Fused multiply-add family. EmitR4 fields here: rs3, fmt (0x0 = single,
// 0x1 = double), rs2, rs1, rounding mode in the funct3 slot, rd, opcode.
// FMADD.S: rd = (rs1 * rs2) + rs3.
FMAddS(FRegister rd,FRegister rs1,FRegister rs2,FRegister rs3,FPRoundingMode frm)932 void Riscv64Assembler::FMAddS(
933     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
934   AssertExtensionsEnabled(Riscv64Extension::kF);
935   EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x43);
936 }
937 
// FMADD.D.
FMAddD(FRegister rd,FRegister rs1,FRegister rs2,FRegister rs3,FPRoundingMode frm)938 void Riscv64Assembler::FMAddD(
939     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
940   AssertExtensionsEnabled(Riscv64Extension::kD);
941   EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x43);
942 }
943 
// FMSUB.S: rd = (rs1 * rs2) - rs3.
FMSubS(FRegister rd,FRegister rs1,FRegister rs2,FRegister rs3,FPRoundingMode frm)944 void Riscv64Assembler::FMSubS(
945     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
946   AssertExtensionsEnabled(Riscv64Extension::kF);
947   EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x47);
948 }
949 
// FMSUB.D.
FMSubD(FRegister rd,FRegister rs1,FRegister rs2,FRegister rs3,FPRoundingMode frm)950 void Riscv64Assembler::FMSubD(
951     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
952   AssertExtensionsEnabled(Riscv64Extension::kD);
953   EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x47);
954 }
955 
// FNMSUB.S: rd = -(rs1 * rs2) + rs3.
FNMSubS(FRegister rd,FRegister rs1,FRegister rs2,FRegister rs3,FPRoundingMode frm)956 void Riscv64Assembler::FNMSubS(
957     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
958   AssertExtensionsEnabled(Riscv64Extension::kF);
959   EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4b);
960 }
961 
// FNMSUB.D.
FNMSubD(FRegister rd,FRegister rs1,FRegister rs2,FRegister rs3,FPRoundingMode frm)962 void Riscv64Assembler::FNMSubD(
963     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
964   AssertExtensionsEnabled(Riscv64Extension::kD);
965   EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4b);
966 }
967 
// FNMADD.S: rd = -(rs1 * rs2) - rs3.
FNMAddS(FRegister rd,FRegister rs1,FRegister rs2,FRegister rs3,FPRoundingMode frm)968 void Riscv64Assembler::FNMAddS(
969     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
970   AssertExtensionsEnabled(Riscv64Extension::kF);
971   EmitR4(rs3, 0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4f);
972 }
973 
// FNMADD.D.
FNMAddD(FRegister rd,FRegister rs1,FRegister rs2,FRegister rs3,FPRoundingMode frm)974 void Riscv64Assembler::FNMAddD(
975     FRegister rd, FRegister rs1, FRegister rs2, FRegister rs3, FPRoundingMode frm) {
976   AssertExtensionsEnabled(Riscv64Extension::kD);
977   EmitR4(rs3, 0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x4f);
978 }
979 
980 // Simple FP instructions (RV32F+RV32D): opcode = 0x53, funct7 = 0b0XXXX0D
981 
// Simple FP arithmetic, opcode 0x53 (OP-FP). The funct7 low bit selects the
// format (even = single, odd = double); the rounding mode rides in funct3.
// FADD.S: funct7 = 0x0.
FAddS(FRegister rd,FRegister rs1,FRegister rs2,FPRoundingMode frm)982 void Riscv64Assembler::FAddS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
983   AssertExtensionsEnabled(Riscv64Extension::kF);
984   EmitR(0x0, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
985 }
986 
// FADD.D: funct7 = 0x1.
FAddD(FRegister rd,FRegister rs1,FRegister rs2,FPRoundingMode frm)987 void Riscv64Assembler::FAddD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
988   AssertExtensionsEnabled(Riscv64Extension::kD);
989   EmitR(0x1, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
990 }
991 
// FSUB.S: funct7 = 0x4.
FSubS(FRegister rd,FRegister rs1,FRegister rs2,FPRoundingMode frm)992 void Riscv64Assembler::FSubS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
993   AssertExtensionsEnabled(Riscv64Extension::kF);
994   EmitR(0x4, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
995 }
996 
// FSUB.D: funct7 = 0x5.
FSubD(FRegister rd,FRegister rs1,FRegister rs2,FPRoundingMode frm)997 void Riscv64Assembler::FSubD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
998   AssertExtensionsEnabled(Riscv64Extension::kD);
999   EmitR(0x5, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1000 }
1001 
// FMUL.S: funct7 = 0x8.
FMulS(FRegister rd,FRegister rs1,FRegister rs2,FPRoundingMode frm)1002 void Riscv64Assembler::FMulS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
1003   AssertExtensionsEnabled(Riscv64Extension::kF);
1004   EmitR(0x8, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1005 }
1006 
// FMUL.D: funct7 = 0x9.
FMulD(FRegister rd,FRegister rs1,FRegister rs2,FPRoundingMode frm)1007 void Riscv64Assembler::FMulD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
1008   AssertExtensionsEnabled(Riscv64Extension::kD);
1009   EmitR(0x9, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1010 }
1011 
// FDIV.S: funct7 = 0xc.
FDivS(FRegister rd,FRegister rs1,FRegister rs2,FPRoundingMode frm)1012 void Riscv64Assembler::FDivS(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
1013   AssertExtensionsEnabled(Riscv64Extension::kF);
1014   EmitR(0xc, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1015 }
1016 
// FDIV.D: funct7 = 0xd.
FDivD(FRegister rd,FRegister rs1,FRegister rs2,FPRoundingMode frm)1017 void Riscv64Assembler::FDivD(FRegister rd, FRegister rs1, FRegister rs2, FPRoundingMode frm) {
1018   AssertExtensionsEnabled(Riscv64Extension::kD);
1019   EmitR(0xd, rs2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1020 }
1021 
// FSQRT.S: funct7 = 0x2c, rs2 field fixed to 0.
FSqrtS(FRegister rd,FRegister rs1,FPRoundingMode frm)1022 void Riscv64Assembler::FSqrtS(FRegister rd, FRegister rs1, FPRoundingMode frm) {
1023   AssertExtensionsEnabled(Riscv64Extension::kF);
1024   EmitR(0x2c, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1025 }
1026 
// FSQRT.D: funct7 = 0x2d, rs2 field fixed to 0.
FSqrtD(FRegister rd,FRegister rs1,FPRoundingMode frm)1027 void Riscv64Assembler::FSqrtD(FRegister rd, FRegister rs1, FPRoundingMode frm) {
1028   AssertExtensionsEnabled(Riscv64Extension::kD);
1029   EmitR(0x2d, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1030 }
1031 
// Sign-injection and min/max: funct7 = 0x10/0x11 (sgnj) or 0x14/0x15
// (min/max) with the low bit selecting double; funct3 picks the variant.
// FSGNJ.S.
FSgnjS(FRegister rd,FRegister rs1,FRegister rs2)1032 void Riscv64Assembler::FSgnjS(FRegister rd, FRegister rs1, FRegister rs2) {
1033   AssertExtensionsEnabled(Riscv64Extension::kF);
1034   EmitR(0x10, rs2, rs1, 0x0, rd, 0x53);
1035 }
1036 
// FSGNJ.D.
FSgnjD(FRegister rd,FRegister rs1,FRegister rs2)1037 void Riscv64Assembler::FSgnjD(FRegister rd, FRegister rs1, FRegister rs2) {
1038   AssertExtensionsEnabled(Riscv64Extension::kD);
1039   EmitR(0x11, rs2, rs1, 0x0, rd, 0x53);
1040 }
1041 
// FSGNJN.S (negated sign injection): funct3 = 0x1.
FSgnjnS(FRegister rd,FRegister rs1,FRegister rs2)1042 void Riscv64Assembler::FSgnjnS(FRegister rd, FRegister rs1, FRegister rs2) {
1043   AssertExtensionsEnabled(Riscv64Extension::kF);
1044   EmitR(0x10, rs2, rs1, 0x1, rd, 0x53);
1045 }
1046 
// FSGNJN.D.
FSgnjnD(FRegister rd,FRegister rs1,FRegister rs2)1047 void Riscv64Assembler::FSgnjnD(FRegister rd, FRegister rs1, FRegister rs2) {
1048   AssertExtensionsEnabled(Riscv64Extension::kD);
1049   EmitR(0x11, rs2, rs1, 0x1, rd, 0x53);
1050 }
1051 
// FSGNJX.S (xor-ed sign injection): funct3 = 0x2.
FSgnjxS(FRegister rd,FRegister rs1,FRegister rs2)1052 void Riscv64Assembler::FSgnjxS(FRegister rd, FRegister rs1, FRegister rs2) {
1053   AssertExtensionsEnabled(Riscv64Extension::kF);
1054   EmitR(0x10, rs2, rs1, 0x2, rd, 0x53);
1055 }
1056 
// FSGNJX.D.
FSgnjxD(FRegister rd,FRegister rs1,FRegister rs2)1057 void Riscv64Assembler::FSgnjxD(FRegister rd, FRegister rs1, FRegister rs2) {
1058   AssertExtensionsEnabled(Riscv64Extension::kD);
1059   EmitR(0x11, rs2, rs1, 0x2, rd, 0x53);
1060 }
1061 
// FMIN.S: funct7 = 0x14, funct3 = 0x0.
FMinS(FRegister rd,FRegister rs1,FRegister rs2)1062 void Riscv64Assembler::FMinS(FRegister rd, FRegister rs1, FRegister rs2) {
1063   AssertExtensionsEnabled(Riscv64Extension::kF);
1064   EmitR(0x14, rs2, rs1, 0x0, rd, 0x53);
1065 }
1066 
// FMIN.D: funct7 = 0x15, funct3 = 0x0.
FMinD(FRegister rd,FRegister rs1,FRegister rs2)1067 void Riscv64Assembler::FMinD(FRegister rd, FRegister rs1, FRegister rs2) {
1068   AssertExtensionsEnabled(Riscv64Extension::kD);
1069   EmitR(0x15, rs2, rs1, 0x0, rd, 0x53);
1070 }
1071 
// FMAX.S: funct7 = 0x14, funct3 = 0x1.
FMaxS(FRegister rd,FRegister rs1,FRegister rs2)1072 void Riscv64Assembler::FMaxS(FRegister rd, FRegister rs1, FRegister rs2) {
1073   AssertExtensionsEnabled(Riscv64Extension::kF);
1074   EmitR(0x14, rs2, rs1, 0x1, rd, 0x53);
1075 }
1076 
FMaxD(FRegister rd,FRegister rs1,FRegister rs2)1077 void Riscv64Assembler::FMaxD(FRegister rd, FRegister rs1, FRegister rs2) {
1078   EmitR(0x15, rs2, rs1, 0x1, rd, 0x53);
1079   AssertExtensionsEnabled(Riscv64Extension::kD);
1080 }
1081 
// FCVT.S.D: narrow double to single; may round, so `frm` is meaningful.
FCvtSD(FRegister rd,FRegister rs1,FPRoundingMode frm)1082 void Riscv64Assembler::FCvtSD(FRegister rd, FRegister rs1, FPRoundingMode frm) {
1083   AssertExtensionsEnabled(Riscv64Extension::kF, Riscv64Extension::kD);
1084   EmitR(0x20, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1085 }
1086 
// FCVT.D.S: widen single to double.
FCvtDS(FRegister rd,FRegister rs1,FPRoundingMode frm)1087 void Riscv64Assembler::FCvtDS(FRegister rd, FRegister rs1, FPRoundingMode frm) {
1088   AssertExtensionsEnabled(Riscv64Extension::kF, Riscv64Extension::kD);
1089   // Note: The `frm` is useless, the result can represent every value of the source exactly.
1090   EmitR(0x21, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1091 }
1092 
1093 // FP compare instructions (RV32F+RV32D): opcode = 0x53, funct7 = 0b101000D
1094 
// FP compares write a 0/1 result into an integer register.
// funct7 = 0x50 (single) / 0x51 (double); funct3: 0x2 = EQ, 0x1 = LT, 0x0 = LE.
// FEQ.S.
FEqS(XRegister rd,FRegister rs1,FRegister rs2)1095 void Riscv64Assembler::FEqS(XRegister rd, FRegister rs1, FRegister rs2) {
1096   AssertExtensionsEnabled(Riscv64Extension::kF);
1097   EmitR(0x50, rs2, rs1, 0x2, rd, 0x53);
1098 }
1099 
// FEQ.D.
FEqD(XRegister rd,FRegister rs1,FRegister rs2)1100 void Riscv64Assembler::FEqD(XRegister rd, FRegister rs1, FRegister rs2) {
1101   AssertExtensionsEnabled(Riscv64Extension::kD);
1102   EmitR(0x51, rs2, rs1, 0x2, rd, 0x53);
1103 }
1104 
// FLT.S.
FLtS(XRegister rd,FRegister rs1,FRegister rs2)1105 void Riscv64Assembler::FLtS(XRegister rd, FRegister rs1, FRegister rs2) {
1106   AssertExtensionsEnabled(Riscv64Extension::kF);
1107   EmitR(0x50, rs2, rs1, 0x1, rd, 0x53);
1108 }
1109 
// FLT.D.
FLtD(XRegister rd,FRegister rs1,FRegister rs2)1110 void Riscv64Assembler::FLtD(XRegister rd, FRegister rs1, FRegister rs2) {
1111   AssertExtensionsEnabled(Riscv64Extension::kD);
1112   EmitR(0x51, rs2, rs1, 0x1, rd, 0x53);
1113 }
1114 
// FLE.S.
FLeS(XRegister rd,FRegister rs1,FRegister rs2)1115 void Riscv64Assembler::FLeS(XRegister rd, FRegister rs1, FRegister rs2) {
1116   AssertExtensionsEnabled(Riscv64Extension::kF);
1117   EmitR(0x50, rs2, rs1, 0x0, rd, 0x53);
1118 }
1119 
// FLE.D.
FLeD(XRegister rd,FRegister rs1,FRegister rs2)1120 void Riscv64Assembler::FLeD(XRegister rd, FRegister rs1, FRegister rs2) {
1121   AssertExtensionsEnabled(Riscv64Extension::kD);
1122   EmitR(0x51, rs2, rs1, 0x0, rd, 0x53);
1123 }
1124 
1125 // FP conversion instructions (RV32F+RV32D+RV64F+RV64D): opcode = 0x53, funct7 = 0b110X00D
1126 
// FP<->integer conversions: funct7 = 0x60/0x61 (FP to int) or 0x68/0x69
// (int to FP); the rs2 field selects W (0x0), WU (0x1), L (0x2) or LU (0x3).
// FCVT.W.S.
FCvtWS(XRegister rd,FRegister rs1,FPRoundingMode frm)1127 void Riscv64Assembler::FCvtWS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
1128   AssertExtensionsEnabled(Riscv64Extension::kF);
1129   EmitR(0x60, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1130 }
1131 
// FCVT.W.D.
FCvtWD(XRegister rd,FRegister rs1,FPRoundingMode frm)1132 void Riscv64Assembler::FCvtWD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
1133   AssertExtensionsEnabled(Riscv64Extension::kD);
1134   EmitR(0x61, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1135 }
1136 
// FCVT.WU.S.
FCvtWuS(XRegister rd,FRegister rs1,FPRoundingMode frm)1137 void Riscv64Assembler::FCvtWuS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
1138   AssertExtensionsEnabled(Riscv64Extension::kF);
1139   EmitR(0x60, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1140 }
1141 
// FCVT.WU.D.
FCvtWuD(XRegister rd,FRegister rs1,FPRoundingMode frm)1142 void Riscv64Assembler::FCvtWuD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
1143   AssertExtensionsEnabled(Riscv64Extension::kD);
1144   EmitR(0x61, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1145 }
1146 
// FCVT.L.S.
FCvtLS(XRegister rd,FRegister rs1,FPRoundingMode frm)1147 void Riscv64Assembler::FCvtLS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
1148   AssertExtensionsEnabled(Riscv64Extension::kF);
1149   EmitR(0x60, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1150 }
1151 
// FCVT.L.D.
FCvtLD(XRegister rd,FRegister rs1,FPRoundingMode frm)1152 void Riscv64Assembler::FCvtLD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
1153   AssertExtensionsEnabled(Riscv64Extension::kD);
1154   EmitR(0x61, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1155 }
1156 
// FCVT.LU.S.
FCvtLuS(XRegister rd,FRegister rs1,FPRoundingMode frm)1157 void Riscv64Assembler::FCvtLuS(XRegister rd, FRegister rs1, FPRoundingMode frm) {
1158   AssertExtensionsEnabled(Riscv64Extension::kF);
1159   EmitR(0x60, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1160 }
1161 
// FCVT.LU.D.
FCvtLuD(XRegister rd,FRegister rs1,FPRoundingMode frm)1162 void Riscv64Assembler::FCvtLuD(XRegister rd, FRegister rs1, FPRoundingMode frm) {
1163   AssertExtensionsEnabled(Riscv64Extension::kD);
1164   EmitR(0x61, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1165 }
1166 
// FCVT.S.W.
FCvtSW(FRegister rd,XRegister rs1,FPRoundingMode frm)1167 void Riscv64Assembler::FCvtSW(FRegister rd, XRegister rs1, FPRoundingMode frm) {
1168   AssertExtensionsEnabled(Riscv64Extension::kF);
1169   EmitR(0x68, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1170 }
1171 
// FCVT.D.W.
FCvtDW(FRegister rd,XRegister rs1,FPRoundingMode frm)1172 void Riscv64Assembler::FCvtDW(FRegister rd, XRegister rs1, FPRoundingMode frm) {
1173   AssertExtensionsEnabled(Riscv64Extension::kD);
1174   // Note: The `frm` is useless, the result can represent every value of the source exactly.
1175   EmitR(0x69, 0x0, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1176 }
1177 
// FCVT.S.WU.
FCvtSWu(FRegister rd,XRegister rs1,FPRoundingMode frm)1178 void Riscv64Assembler::FCvtSWu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
1179   AssertExtensionsEnabled(Riscv64Extension::kF);
1180   EmitR(0x68, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1181 }
1182 
// FCVT.D.WU.
FCvtDWu(FRegister rd,XRegister rs1,FPRoundingMode frm)1183 void Riscv64Assembler::FCvtDWu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
1184   AssertExtensionsEnabled(Riscv64Extension::kD);
1185   // Note: The `frm` is useless, the result can represent every value of the source exactly.
1186   EmitR(0x69, 0x1, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1187 }
1188 
// FCVT.S.L.
FCvtSL(FRegister rd,XRegister rs1,FPRoundingMode frm)1189 void Riscv64Assembler::FCvtSL(FRegister rd, XRegister rs1, FPRoundingMode frm) {
1190   AssertExtensionsEnabled(Riscv64Extension::kF);
1191   EmitR(0x68, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1192 }
1193 
// FCVT.D.L.
FCvtDL(FRegister rd,XRegister rs1,FPRoundingMode frm)1194 void Riscv64Assembler::FCvtDL(FRegister rd, XRegister rs1, FPRoundingMode frm) {
1195   AssertExtensionsEnabled(Riscv64Extension::kD);
1196   EmitR(0x69, 0x2, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1197 }
1198 
// FCVT.S.LU.
FCvtSLu(FRegister rd,XRegister rs1,FPRoundingMode frm)1199 void Riscv64Assembler::FCvtSLu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
1200   AssertExtensionsEnabled(Riscv64Extension::kF);
1201   EmitR(0x68, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1202 }
1203 
// FCVT.D.LU.
FCvtDLu(FRegister rd,XRegister rs1,FPRoundingMode frm)1204 void Riscv64Assembler::FCvtDLu(FRegister rd, XRegister rs1, FPRoundingMode frm) {
1205   AssertExtensionsEnabled(Riscv64Extension::kD);
1206   EmitR(0x69, 0x3, rs1, enum_cast<uint32_t>(frm), rd, 0x53);
1207 }
1208 
1209 // FP move instructions (RV32F+RV32D): opcode = 0x53, funct3 = 0x0, funct7 = 0b111X00D
1210 
// Raw bit moves between FP and integer registers (no conversion).
// FMV.X.W: funct7 = 0x70.
FMvXW(XRegister rd,FRegister rs1)1211 void Riscv64Assembler::FMvXW(XRegister rd, FRegister rs1) {
1212   AssertExtensionsEnabled(Riscv64Extension::kF);
1213   EmitR(0x70, 0x0, rs1, 0x0, rd, 0x53);
1214 }
1215 
// FMV.X.D: funct7 = 0x71.
FMvXD(XRegister rd,FRegister rs1)1216 void Riscv64Assembler::FMvXD(XRegister rd, FRegister rs1) {
1217   AssertExtensionsEnabled(Riscv64Extension::kD);
1218   EmitR(0x71, 0x0, rs1, 0x0, rd, 0x53);
1219 }
1220 
// FMV.W.X: funct7 = 0x78.
FMvWX(FRegister rd,XRegister rs1)1221 void Riscv64Assembler::FMvWX(FRegister rd, XRegister rs1) {
1222   AssertExtensionsEnabled(Riscv64Extension::kF);
1223   EmitR(0x78, 0x0, rs1, 0x0, rd, 0x53);
1224 }
1225 
// FMV.D.X: funct7 = 0x79.
FMvDX(FRegister rd,XRegister rs1)1226 void Riscv64Assembler::FMvDX(FRegister rd, XRegister rs1) {
1227   AssertExtensionsEnabled(Riscv64Extension::kD);
1228   EmitR(0x79, 0x0, rs1, 0x0, rd, 0x53);
1229 }
1230 
1231 // FP classify instructions (RV32F+RV32D): opcode = 0x53, funct3 = 0x1, funct7 = 0b111X00D
1232 
// FCLASS.S: classify the FP value in rs1 into a 10-bit mask written to rd.
FClassS(XRegister rd,FRegister rs1)1233 void Riscv64Assembler::FClassS(XRegister rd, FRegister rs1) {
1234   AssertExtensionsEnabled(Riscv64Extension::kF);
1235   EmitR(0x70, 0x0, rs1, 0x1, rd, 0x53);
1236 }
1237 
// FCLASS.D.
FClassD(XRegister rd,FRegister rs1)1238 void Riscv64Assembler::FClassD(XRegister rd, FRegister rs1) {
1239   AssertExtensionsEnabled(Riscv64Extension::kD);
1240   EmitR(0x71, 0x0, rs1, 0x1, rd, 0x53);
1241 }
1242 
1243 /////////////////////////////// RV64 "FD" Instructions  END ///////////////////////////////
1244 
1245 /////////////////////////////// RV64 "C" Instructions  START /////////////////////////////
1246 
// Compressed stack-pointer-relative loads/stores. The ExtractOffset* helpers
// scatter the (scaled, unsigned) offset into the CI/CSS immediate bit layout.
// C.LWSP (rd must not be x0).
CLwsp(XRegister rd,int32_t offset)1247 void Riscv64Assembler::CLwsp(XRegister rd, int32_t offset) {
1248   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
1249   DCHECK_NE(rd, Zero);
1250   EmitCI(0b010u, rd, ExtractOffset52_76(offset), 0b10u);
1251 }
1252 
// C.LDSP (rd must not be x0).
CLdsp(XRegister rd,int32_t offset)1253 void Riscv64Assembler::CLdsp(XRegister rd, int32_t offset) {
1254   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
1255   DCHECK_NE(rd, Zero);
1256   EmitCI(0b011u, rd, ExtractOffset53_86(offset), 0b10u);
1257 }
1258 
// C.FLDSP (requires the "Zcd"+"D" extensions; any FP rd is allowed).
CFLdsp(FRegister rd,int32_t offset)1259 void Riscv64Assembler::CFLdsp(FRegister rd, int32_t offset) {
1260   AssertExtensionsEnabled(
1261       Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
1262   EmitCI(0b001u, rd, ExtractOffset53_86(offset), 0b10u);
1263 }
1264 
// C.SWSP.
CSwsp(XRegister rs2,int32_t offset)1265 void Riscv64Assembler::CSwsp(XRegister rs2, int32_t offset) {
1266   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
1267   EmitCSS(0b110u, ExtractOffset52_76(offset), rs2, 0b10u);
1268 }
1269 
// C.SDSP.
CSdsp(XRegister rs2,int32_t offset)1270 void Riscv64Assembler::CSdsp(XRegister rs2, int32_t offset) {
1271   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
1272   EmitCSS(0b111u, ExtractOffset53_86(offset), rs2, 0b10u);
1273 }
1274 
// C.FSDSP (requires the "Zcd"+"D" extensions).
CFSdsp(FRegister rs2,int32_t offset)1275 void Riscv64Assembler::CFSdsp(FRegister rs2, int32_t offset) {
1276   AssertExtensionsEnabled(
1277       Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
1278   EmitCSS(0b101u, ExtractOffset53_86(offset), rs2, 0b10u);
1279 }
1280 
// Compressed register-based loads/stores; the `_s` suffix marks "short"
// (3-bit encodable, x8-x15 / f8-f15) register operands.
// C.LW.
CLw(XRegister rd_s,XRegister rs1_s,int32_t offset)1281 void Riscv64Assembler::CLw(XRegister rd_s, XRegister rs1_s, int32_t offset) {
1282   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
1283   EmitCM(0b010u, ExtractOffset52_6(offset), rs1_s, rd_s, 0b00u);
1284 }
1285 
// C.LD.
CLd(XRegister rd_s,XRegister rs1_s,int32_t offset)1286 void Riscv64Assembler::CLd(XRegister rd_s, XRegister rs1_s, int32_t offset) {
1287   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
1288   EmitCM(0b011u, ExtractOffset53_76(offset), rs1_s, rd_s, 0b00u);
1289 }
1290 
// C.FLD (requires the "Zcd"+"D" extensions).
CFLd(FRegister rd_s,XRegister rs1_s,int32_t offset)1291 void Riscv64Assembler::CFLd(FRegister rd_s, XRegister rs1_s, int32_t offset) {
1292   AssertExtensionsEnabled(
1293       Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
1294   EmitCM(0b001u, ExtractOffset53_76(offset), rs1_s, rd_s, 0b00u);
1295 }
1296 
// C.SW.
CSw(XRegister rs2_s,XRegister rs1_s,int32_t offset)1297 void Riscv64Assembler::CSw(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
1298   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
1299   EmitCM(0b110u, ExtractOffset52_6(offset), rs1_s, rs2_s, 0b00u);
1300 }
1301 
// C.SD.
CSd(XRegister rs2_s,XRegister rs1_s,int32_t offset)1302 void Riscv64Assembler::CSd(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
1303   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZca);
1304   EmitCM(0b111u, ExtractOffset53_76(offset), rs1_s, rs2_s, 0b00u);
1305 }
1306 
// C.FSD (requires the "Zcd"+"D" extensions).
CFSd(FRegister rs2_s,XRegister rs1_s,int32_t offset)1307 void Riscv64Assembler::CFSd(FRegister rs2_s, XRegister rs1_s, int32_t offset) {
1308   AssertExtensionsEnabled(
1309       Riscv64Extension::kLoadStore, Riscv64Extension::kZcd, Riscv64Extension::kD);
1310   EmitCM(0b101u, ExtractOffset53_76(offset), rs1_s, rs2_s, 0b00u);
1311 }
1312 
// C.LI: load 6-bit sign-extended immediate (rd must not be x0).
CLi(XRegister rd,int32_t imm)1313 void Riscv64Assembler::CLi(XRegister rd, int32_t imm) {
1314   AssertExtensionsEnabled(Riscv64Extension::kZca);
1315   DCHECK_NE(rd, Zero);
1316   DCHECK(IsInt<6>(imm));
1317   EmitCI(0b010u, rd, EncodeInt6(imm), 0b01u);
1318 }
1319 
// C.LUI: load upper immediate (rd must be neither x0 nor sp; the sp
// encoding is repurposed for C.ADDI16SP below).
CLui(XRegister rd,uint32_t nzimm6)1320 void Riscv64Assembler::CLui(XRegister rd, uint32_t nzimm6) {
1321   AssertExtensionsEnabled(Riscv64Extension::kZca);
1322   DCHECK_NE(rd, Zero);
1323   DCHECK_NE(rd, SP);
1324   DCHECK(IsImmCLuiEncodable(nzimm6));
1325   EmitCI(0b011u, rd, nzimm6 & MaskLeastSignificant<uint32_t>(6), 0b01u);
1326 }
1327 
// C.ADDI: add non-zero 6-bit immediate to rd in place.
CAddi(XRegister rd,int32_t nzimm)1328 void Riscv64Assembler::CAddi(XRegister rd, int32_t nzimm) {
1329   AssertExtensionsEnabled(Riscv64Extension::kZca);
1330   DCHECK_NE(rd, Zero);
1331   DCHECK_NE(nzimm, 0);
1332   EmitCI(0b000u, rd, EncodeInt6(nzimm), 0b01u);
1333 }
1334 
// C.ADDIW: 32-bit add immediate with sign extension (zero imm is legal here).
CAddiw(XRegister rd,int32_t imm)1335 void Riscv64Assembler::CAddiw(XRegister rd, int32_t imm) {
1336   AssertExtensionsEnabled(Riscv64Extension::kZca);
1337   DCHECK_NE(rd, Zero);
1338   EmitCI(0b001u, rd, EncodeInt6(imm), 0b01u);
1339 }
1340 
// C.ADDI16SP: adjust sp by a non-zero, 16-byte-aligned 10-bit immediate.
// The immediate bits are scattered per the spec's nzimm[9|4|6|8:7|5] layout.
CAddi16Sp(int32_t nzimm)1341 void Riscv64Assembler::CAddi16Sp(int32_t nzimm) {
1342   AssertExtensionsEnabled(Riscv64Extension::kZca);
1343   DCHECK_NE(nzimm, 0);
1344   DCHECK(IsAligned<16>(nzimm));
1345   DCHECK(IsInt<10>(nzimm));
1346 
1347   uint32_t unzimm = static_cast<uint32_t>(nzimm);
1348 
1349   // nzimm[9]
1350   uint32_t imms1 =  BitFieldExtract(unzimm, 9, 1);
1351   // nzimm[4|6|8:7|5]
1352   uint32_t imms0 = (BitFieldExtract(unzimm, 4, 1) << 4) |
1353                    (BitFieldExtract(unzimm, 6, 1) << 3) |
1354                    (BitFieldExtract(unzimm, 7, 2) << 1) |
1355                     BitFieldExtract(unzimm, 5, 1);
1356 
1357   EmitCI(0b011u, SP, BitFieldInsert(imms0, imms1, 5, 1), 0b01u);
1358 }
1359 
// C.ADDI4SPN: rd_s = sp + non-zero, 4-byte-aligned 10-bit unsigned immediate,
// scattered per the spec's nzuimm[5:4|9:6|2|3] layout.
CAddi4Spn(XRegister rd_s,uint32_t nzuimm)1360 void Riscv64Assembler::CAddi4Spn(XRegister rd_s, uint32_t nzuimm) {
1361   AssertExtensionsEnabled(Riscv64Extension::kZca);
1362   DCHECK_NE(nzuimm, 0u);
1363   DCHECK(IsAligned<4>(nzuimm));
1364   DCHECK(IsUint<10>(nzuimm));
1365 
1366   // nzuimm[5:4|9:6|2|3]
1367   uint32_t uimm = (BitFieldExtract(nzuimm, 4, 2) << 6) |
1368                   (BitFieldExtract(nzuimm, 6, 4) << 2) |
1369                   (BitFieldExtract(nzuimm, 2, 1) << 1) |
1370                    BitFieldExtract(nzuimm, 3, 1);
1371 
1372   EmitCIW(0b000u, uimm, rd_s, 0b00u);
1373 }
1374 
// C.SLLI: shift left logical immediate (non-zero shamt, rd != x0).
// NOTE(review): unlike CSrli/CSrai below there is no IsUint<6>(shamt)
// DCHECK here — presumably EmitCI masks/checks it; confirm.
CSlli(XRegister rd,int32_t shamt)1375 void Riscv64Assembler::CSlli(XRegister rd, int32_t shamt) {
1376   AssertExtensionsEnabled(Riscv64Extension::kZca);
1377   DCHECK_NE(shamt, 0);
1378   DCHECK_NE(rd, Zero);
1379   EmitCI(0b000u, rd, shamt, 0b10u);
1380 }
1381 
// C.SRLI: shift right logical immediate on a short register.
CSrli(XRegister rd_s,int32_t shamt)1382 void Riscv64Assembler::CSrli(XRegister rd_s, int32_t shamt) {
1383   AssertExtensionsEnabled(Riscv64Extension::kZca);
1384   DCHECK_NE(shamt, 0);
1385   DCHECK(IsUint<6>(shamt));
1386   EmitCBArithmetic(0b100u, 0b00u, shamt, rd_s, 0b01u);
1387 }
1388 
// C.SRAI: shift right arithmetic immediate on a short register.
CSrai(XRegister rd_s,int32_t shamt)1389 void Riscv64Assembler::CSrai(XRegister rd_s, int32_t shamt) {
1390   AssertExtensionsEnabled(Riscv64Extension::kZca);
1391   DCHECK_NE(shamt, 0);
1392   DCHECK(IsUint<6>(shamt));
1393   EmitCBArithmetic(0b100u, 0b01u, shamt, rd_s, 0b01u);
1394 }
1395 
// C.ANDI: and 6-bit sign-extended immediate into a short register.
CAndi(XRegister rd_s,int32_t imm)1396 void Riscv64Assembler::CAndi(XRegister rd_s, int32_t imm) {
1397   AssertExtensionsEnabled(Riscv64Extension::kZca);
1398   DCHECK(IsInt<6>(imm));
1399   EmitCBArithmetic(0b100u, 0b10u, imm, rd_s, 0b01u);
1400 }
1401 
// C.MV: rd = rs2 (both must be non-zero; rs2 == x0 would encode C.JR).
CMv(XRegister rd,XRegister rs2)1402 void Riscv64Assembler::CMv(XRegister rd, XRegister rs2) {
1403   AssertExtensionsEnabled(Riscv64Extension::kZca);
1404   DCHECK_NE(rd, Zero);
1405   DCHECK_NE(rs2, Zero);
1406   EmitCR(0b1000u, rd, rs2, 0b10u);
1407 }
1408 
// C.ADD: rd += rs2 (both non-zero; rs2 == x0 would encode C.JALR/C.EBREAK).
CAdd(XRegister rd,XRegister rs2)1409 void Riscv64Assembler::CAdd(XRegister rd, XRegister rs2) {
1410   AssertExtensionsEnabled(Riscv64Extension::kZca);
1411   DCHECK_NE(rd, Zero);
1412   DCHECK_NE(rs2, Zero);
1413   EmitCR(0b1001u, rd, rs2, 0b10u);
1414 }
1415 
// CA-format ALU ops on short registers; the 2-bit funct2 selects the op.
// C.AND.
CAnd(XRegister rd_s,XRegister rs2_s)1416 void Riscv64Assembler::CAnd(XRegister rd_s, XRegister rs2_s) {
1417   AssertExtensionsEnabled(Riscv64Extension::kZca);
1418   EmitCAReg(0b100011u, rd_s, 0b11u, rs2_s, 0b01u);
1419 }
1420 
// C.OR.
COr(XRegister rd_s,XRegister rs2_s)1421 void Riscv64Assembler::COr(XRegister rd_s, XRegister rs2_s) {
1422   AssertExtensionsEnabled(Riscv64Extension::kZca);
1423   EmitCAReg(0b100011u, rd_s, 0b10u, rs2_s, 0b01u);
1424 }
1425 
// C.XOR.
CXor(XRegister rd_s,XRegister rs2_s)1426 void Riscv64Assembler::CXor(XRegister rd_s, XRegister rs2_s) {
1427   AssertExtensionsEnabled(Riscv64Extension::kZca);
1428   EmitCAReg(0b100011u, rd_s, 0b01u, rs2_s, 0b01u);
1429 }
1430 
// C.SUB.
CSub(XRegister rd_s,XRegister rs2_s)1431 void Riscv64Assembler::CSub(XRegister rd_s, XRegister rs2_s) {
1432   AssertExtensionsEnabled(Riscv64Extension::kZca);
1433   EmitCAReg(0b100011u, rd_s, 0b00u, rs2_s, 0b01u);
1434 }
1435 
// C.ADDW (32-bit add, sign-extended result).
CAddw(XRegister rd_s,XRegister rs2_s)1436 void Riscv64Assembler::CAddw(XRegister rd_s, XRegister rs2_s) {
1437   AssertExtensionsEnabled(Riscv64Extension::kZca);
1438   EmitCAReg(0b100111u, rd_s, 0b01u, rs2_s, 0b01u);
1439 }
1440 
// C.SUBW (32-bit subtract, sign-extended result).
CSubw(XRegister rd_s,XRegister rs2_s)1441 void Riscv64Assembler::CSubw(XRegister rd_s, XRegister rs2_s) {
1442   AssertExtensionsEnabled(Riscv64Extension::kZca);
1443   EmitCAReg(0b100111u, rd_s, 0b00u, rs2_s, 0b01u);
1444 }
1445 
1446 // "Zcb" Standard Extension, part of "C", opcode = 0b00, 0b01, funct3 = 0b100.
1447 
CLbu(XRegister rd_s,XRegister rs1_s,int32_t offset)1448 void Riscv64Assembler::CLbu(XRegister rd_s, XRegister rs1_s, int32_t offset) {
1449   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
1450   EmitCAReg(0b100000u, rs1_s, EncodeOffset0_1(offset), rd_s, 0b00u);
1451 }
1452 
CLhu(XRegister rd_s,XRegister rs1_s,int32_t offset)1453 void Riscv64Assembler::CLhu(XRegister rd_s, XRegister rs1_s, int32_t offset) {
1454   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
1455   DCHECK(IsUint<2>(offset));
1456   DCHECK_ALIGNED(offset, 2);
1457   EmitCAReg(0b100001u, rs1_s, BitFieldExtract<uint32_t>(offset, 1, 1), rd_s, 0b00u);
1458 }
1459 
// C.LH ("Zcb"): load sign-extended halfword from rs1_s + offset into rd_s.
// Same encoding as C.LHU except bit 0 of funct2 is set (the 0b10 below) to mark "signed".
void Riscv64Assembler::CLh(XRegister rd_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
  DCHECK(IsUint<2>(offset));
  DCHECK_ALIGNED(offset, 2);
  EmitCAReg(0b100001u, rs1_s, 0b10 | BitFieldExtract<uint32_t>(offset, 1, 1), rd_s, 0b00u);
}
1466 
// C.SB ("Zcb"): store byte rs2_s to rs1_s + offset. 2-bit offset packed by EncodeOffset0_1.
void Riscv64Assembler::CSb(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
  EmitCAReg(0b100010u, rs1_s, EncodeOffset0_1(offset), rs2_s, 0b00u);
}
1471 
// C.SH ("Zcb"): store halfword rs2_s to rs1_s + offset.
// Only offsets 0 and 2 are encodable; bit 1 of the offset is carried in the funct2 field.
void Riscv64Assembler::CSh(XRegister rs2_s, XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kZcb);
  DCHECK(IsUint<2>(offset));
  DCHECK_ALIGNED(offset, 2);
  EmitCAReg(0b100011u, rs1_s, BitFieldExtract<uint32_t>(offset, 1, 1), rs2_s, 0b00u);
}
1478 
// C.ZEXT.B ("Zcb"): rd_rs1_s = zext8(rd_rs1_s). One of the single-register
// CA-immediate ops sharing funct6=100111/funct2=11, distinguished by the imm field (000).
void Riscv64Assembler::CZextB(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b000u, 0b01u);
}
1483 
// C.SEXT.B ("Zcb"): rd_rs1_s = sext8(rd_rs1_s). Expands to a Zbb op, hence the extra
// kZbb requirement.
void Riscv64Assembler::CSextB(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb, Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b001u, 0b01u);
}
1488 
// C.ZEXT.H ("Zcb"): rd_rs1_s = zext16(rd_rs1_s). Requires Zbb (expands to zext.h).
void Riscv64Assembler::CZextH(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb, Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b010u, 0b01u);
}
1493 
// C.SEXT.H ("Zcb"): rd_rs1_s = sext16(rd_rs1_s). Requires Zbb (expands to sext.h).
void Riscv64Assembler::CSextH(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb, Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b011u, 0b01u);
}
1498 
// C.ZEXT.W ("Zcb"): rd_rs1_s = zext32(rd_rs1_s). Requires Zba (expands to add.uw).
void Riscv64Assembler::CZextW(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZba, Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b100u, 0b01u);
}
1503 
// C.NOT ("Zcb"): rd_rs1_s = ~rd_rs1_s.
void Riscv64Assembler::CNot(XRegister rd_rs1_s) {
  AssertExtensionsEnabled(Riscv64Extension::kZcb);
  EmitCAImm(0b100111u, rd_rs1_s, 0b11u, 0b101u, 0b01u);
}
1508 
// C.MUL ("Zcb"): rd_s = rd_s * rs2_s. Requires the "M" extension (expands to mul).
void Riscv64Assembler::CMul(XRegister rd_s, XRegister rs2_s) {
  AssertExtensionsEnabled(Riscv64Extension::kM, Riscv64Extension::kZcb);
  EmitCAReg(0b100111u, rd_s, 0b10u, rs2_s, 0b01u);
}
1513 
// C.J: unconditional PC-relative jump; CJ-format offset encoding handled by EmitCJ.
void Riscv64Assembler::CJ(int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCJ(0b101u, offset, 0b01u);
}
1518 
// C.JR: jump to address in rs1. rs1 == x0 is a reserved encoding, hence the DCHECK.
void Riscv64Assembler::CJr(XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rs1, Zero);
  EmitCR(0b1000u, rs1, Zero, 0b10u);
}
1524 
// C.JALR: jump-and-link to address in rs1 (link register ra implied by the encoding).
// rs1 == x0 would collide with the C.EBREAK encoding (see CEbreak), hence the DCHECK.
void Riscv64Assembler::CJalr(XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  DCHECK_NE(rs1, Zero);
  EmitCR(0b1001u, rs1, Zero, 0b10u);
}
1530 
// C.BEQZ: branch to PC + offset if rs1_s == 0. CB-format offset handled by EmitCBBranch.
void Riscv64Assembler::CBeqz(XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCBBranch(0b110u, offset, rs1_s, 0b01u);
}
1535 
// C.BNEZ: branch to PC + offset if rs1_s != 0.
void Riscv64Assembler::CBnez(XRegister rs1_s, int32_t offset) {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCBBranch(0b111u, offset, rs1_s, 0b01u);
}
1540 
// C.EBREAK: compressed breakpoint. Encoded as the C.JALR funct4 with rs1 = rs2 = x0.
void Riscv64Assembler::CEbreak() {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCR(0b1001u, Zero, Zero, 0b10u);
}
1545 
// C.NOP: compressed no-op (C.ADDI x0, 0).
void Riscv64Assembler::CNop() {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  EmitCI(0b000u, Zero, 0u, 0b01u);
}
1550 
// Emit the canonical 16-bit illegal instruction (all zeros), conventionally "c.unimp".
void Riscv64Assembler::CUnimp() {
  AssertExtensionsEnabled(Riscv64Extension::kZca);
  Emit16(0x0u);
}
1555 
1556 /////////////////////////////// RV64 "C" Instructions  END ///////////////////////////////
1557 
1558 ////////////////////////////// RV64 "Zba" Instructions  START /////////////////////////////
1559 
// ADD.UW ("Zba"): rd = rs1[31:0] (zero-extended) + rs2.
void Riscv64Assembler::AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x4, rs2, rs1, 0x0, rd, 0x3b);  // funct7=0x04, funct3=0, OP-32 opcode.
}
1564 
// SH1ADD ("Zba"): rd = (rs1 << 1) + rs2. The shift amount selects funct3 (0x2/0x4/0x6).
void Riscv64Assembler::Sh1Add(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x2, rd, 0x33);
}
1569 
// SH1ADD.UW ("Zba"): rd = (zext32(rs1) << 1) + rs2. Same funct fields as SH1ADD but
// on the OP-32 opcode (0x3b).
void Riscv64Assembler::Sh1AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x2, rd, 0x3b);
}
1574 
// SH2ADD ("Zba"): rd = (rs1 << 2) + rs2.
void Riscv64Assembler::Sh2Add(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x4, rd, 0x33);
}
1579 
// SH2ADD.UW ("Zba"): rd = (zext32(rs1) << 2) + rs2.
void Riscv64Assembler::Sh2AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x4, rd, 0x3b);
}
1584 
// SH3ADD ("Zba"): rd = (rs1 << 3) + rs2.
void Riscv64Assembler::Sh3Add(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x6, rd, 0x33);
}
1589 
// SH3ADD.UW ("Zba"): rd = (zext32(rs1) << 3) + rs2.
void Riscv64Assembler::Sh3AddUw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitR(0x10, rs2, rs1, 0x6, rd, 0x3b);
}
1594 
// SLLI.UW ("Zba"): rd = zext32(rs1) << shamt. Uses the 6-bit-shamt I-type encoder
// on the OP-IMM-32 opcode (0x1b) with funct6=0x2.
void Riscv64Assembler::SlliUw(XRegister rd, XRegister rs1, int32_t shamt) {
  AssertExtensionsEnabled(Riscv64Extension::kZba);
  EmitI6(0x2, shamt, rs1, 0x1, rd, 0x1b);
}
1599 
1600 /////////////////////////////// RV64 "Zba" Instructions  END //////////////////////////////
1601 
1602 ////////////////////////////// RV64 "Zbb" Instructions  START /////////////////////////////
1603 
// ANDN ("Zbb"): rd = rs1 & ~rs2.
void Riscv64Assembler::Andn(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x20, rs2, rs1, 0x7, rd, 0x33);
}
1608 
// ORN ("Zbb"): rd = rs1 | ~rs2.
void Riscv64Assembler::Orn(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x20, rs2, rs1, 0x6, rd, 0x33);
}
1613 
// XNOR ("Zbb"): rd = ~(rs1 ^ rs2).
void Riscv64Assembler::Xnor(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x20, rs2, rs1, 0x4, rd, 0x33);
}
1618 
// CLZ ("Zbb"): rd = count of leading zero bits in rs1. Unary op encoded in the
// rs2 field (0x0) of an OP-IMM instruction.
void Riscv64Assembler::Clz(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x0, rs1, 0x1, rd, 0x13);
}
1623 
// CLZW ("Zbb"): CLZ on the low 32 bits of rs1 (OP-IMM-32 opcode 0x1b).
void Riscv64Assembler::Clzw(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x0, rs1, 0x1, rd, 0x1b);
}
1628 
// CTZ ("Zbb"): rd = count of trailing zero bits in rs1.
void Riscv64Assembler::Ctz(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x1, rs1, 0x1, rd, 0x13);
}
1633 
// CTZW ("Zbb"): CTZ on the low 32 bits of rs1.
void Riscv64Assembler::Ctzw(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x1, rs1, 0x1, rd, 0x1b);
}
1638 
// CPOP ("Zbb"): rd = population count (number of set bits) of rs1.
void Riscv64Assembler::Cpop(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x2, rs1, 0x1, rd, 0x13);
}
1643 
// CPOPW ("Zbb"): population count of the low 32 bits of rs1.
void Riscv64Assembler::Cpopw(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x2, rs1, 0x1, rd, 0x1b);
}
1648 
// MIN ("Zbb"): rd = signed minimum of rs1 and rs2.
void Riscv64Assembler::Min(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x5, rs2, rs1, 0x4, rd, 0x33);
}
1653 
// MINU ("Zbb"): rd = unsigned minimum of rs1 and rs2.
void Riscv64Assembler::Minu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x5, rs2, rs1, 0x5, rd, 0x33);
}
1658 
// MAX ("Zbb"): rd = signed maximum of rs1 and rs2.
void Riscv64Assembler::Max(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x5, rs2, rs1, 0x6, rd, 0x33);
}
1663 
// MAXU ("Zbb"): rd = unsigned maximum of rs1 and rs2.
void Riscv64Assembler::Maxu(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x5, rs2, rs1, 0x7, rd, 0x33);
}
1668 
// ROL ("Zbb"): rd = rs1 rotated left by rs2 (low bits of rs2 give the amount).
void Riscv64Assembler::Rol(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, rs2, rs1, 0x1, rd, 0x33);
}
1673 
// ROLW ("Zbb"): 32-bit rotate left, result sign-extended (OP-32 opcode 0x3b).
void Riscv64Assembler::Rolw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, rs2, rs1, 0x1, rd, 0x3b);
}
1678 
// ROR ("Zbb"): rd = rs1 rotated right by rs2.
void Riscv64Assembler::Ror(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, rs2, rs1, 0x5, rd, 0x33);
}
1683 
// RORW ("Zbb"): 32-bit rotate right, result sign-extended.
void Riscv64Assembler::Rorw(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, rs2, rs1, 0x5, rd, 0x3b);
}
1688 
// RORI ("Zbb"): rd = rs1 rotated right by immediate shamt (0..63 for RV64).
void Riscv64Assembler::Rori(XRegister rd, XRegister rs1, int32_t shamt) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  CHECK_LT(static_cast<uint32_t>(shamt), 64u);  // Also rejects negative shamt via the cast.
  EmitI6(0x18, shamt, rs1, 0x5, rd, 0x13);
}
1694 
// RORIW ("Zbb"): 32-bit rotate right by immediate shamt (0..31), result sign-extended.
void Riscv64Assembler::Roriw(XRegister rd, XRegister rs1, int32_t shamt) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  CHECK_LT(static_cast<uint32_t>(shamt), 32u);  // Also rejects negative shamt via the cast.
  EmitI6(0x18, shamt, rs1, 0x5, rd, 0x1b);
}
1700 
// ORC.B ("Zbb"): byte-wise OR-combine — each byte of rd becomes 0xff if the
// corresponding byte of rs1 is non-zero, else 0x00.
void Riscv64Assembler::OrcB(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x14, 0x7, rs1, 0x5, rd, 0x13);
}
1705 
// REV8 ("Zbb"): reverse the bytes of rs1 (full 64-bit byte swap on RV64).
void Riscv64Assembler::Rev8(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x35, 0x18, rs1, 0x5, rd, 0x13);
}
1710 
// SEXT.B ("Zbb"): rd = sext8(rs1). Prefixed "Zbb" to avoid clashing with the
// assembler's pseudo-instruction helpers of the same mnemonic.
void Riscv64Assembler::ZbbSextB(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x4, rs1, 0x1, rd, 0x13);
}
1715 
// SEXT.H ("Zbb"): rd = sext16(rs1).
void Riscv64Assembler::ZbbSextH(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x30, 0x5, rs1, 0x1, rd, 0x13);
}
1720 
// ZEXT.H ("Zbb"): rd = zext16(rs1). Note the distinct encoding: OP-32 opcode with rs2=0.
void Riscv64Assembler::ZbbZextH(XRegister rd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kZbb);
  EmitR(0x4, 0x0, rs1, 0x4, rd, 0x3b);
}
1725 
1726 /////////////////////////////// RV64 "Zbb" Instructions  END //////////////////////////////
1727 
1728 /////////////////////////////// RVV "VSet" Instructions  START ////////////////////////////
1729 
// VSETVLI: set vector length/type from AVL in rs1 and an 11-bit immediate vtypei;
// rd receives the new vl. Encoded via the OP-V opcode (0x57) with the config funct3.
void Riscv64Assembler::VSetvli(XRegister rd, XRegister rs1, uint32_t vtypei) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK(IsUint<11>(vtypei));  // Top bit of the 12-bit immediate must be 0 for vsetvli.
  EmitI(vtypei, rs1, enum_cast<uint32_t>(VAIEncoding::kOPCFG), rd, 0x57);
}
1735 
// VSETIVLI: like vsetvli but the AVL is a 5-bit immediate (uimm, in the rs1 slot)
// and vtypei is 10 bits. The `~0U << 10` forces the top two bits of the 12-bit
// immediate to 1, which is the vsetivli discriminator.
void Riscv64Assembler::VSetivli(XRegister rd, uint32_t uimm, uint32_t vtypei) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK(IsUint<10>(vtypei));
  DCHECK(IsUint<5>(uimm));
  EmitI((~0U << 10 | vtypei), uimm, enum_cast<uint32_t>(VAIEncoding::kOPCFG), rd, 0x57);
}
1742 
// VSETVL: set vector length/type with vtype taken from register rs2 (AVL in rs1).
void Riscv64Assembler::VSetvl(XRegister rd, XRegister rs1, XRegister rs2) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  EmitR(0x40, rs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPCFG), rd, 0x57);
}
1747 
1748 /////////////////////////////// RVV "VSet" Instructions  END //////////////////////////////
1749 
1750 /////////////////////////////// RVV Load/Store Instructions  START ////////////////////////////
1751 
// VLE8.V: unit-stride vector load of 8-bit elements from address in rs1 into vd.
// A masked op (vm == kV0_t) must not write v0, since v0 holds the mask.
void Riscv64Assembler::VLe8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1758 
// VLE16.V: unit-stride vector load of 16-bit elements from address in rs1 into vd.
void Riscv64Assembler::VLe16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1765 
// VLE32.V: unit-stride vector load of 32-bit elements from address in rs1 into vd.
void Riscv64Assembler::VLe32(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1772 
// VLE64.V: unit-stride vector load of 64-bit elements from address in rs1 into vd.
void Riscv64Assembler::VLe64(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1779 
// VSE8.V: unit-stride vector store of 8-bit elements from vs3 to address in rs1.
// Stores read (not write) vector registers, so no vd != v0 restriction applies.
void Riscv64Assembler::VSe8(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
1785 
// VSE16.V: unit-stride vector store of 16-bit elements from vs3 to address in rs1.
void Riscv64Assembler::VSe16(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
1791 
// VSE32.V: unit-stride vector store of 32-bit elements from vs3 to address in rs1.
void Riscv64Assembler::VSe32(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
1797 
// VSE64.V: unit-stride vector store of 64-bit elements from vs3 to address in rs1.
void Riscv64Assembler::VSe64(VRegister vs3, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
1803 
// VLM.V: load a vector mask register (ceil(vl/8) bytes) from address in rs1.
// Always unmasked; the lumop field 0b01011 marks the mask-load variant.
void Riscv64Assembler::VLm(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01011, rs1, enum_cast<uint32_t>(VectorWidth::kMask), vd, 0x7);
}
1809 
// VSM.V: store a vector mask register to address in rs1. Always unmasked;
// sumop field 0b01011 marks the mask-store variant.
void Riscv64Assembler::VSm(VRegister vs3, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01011, rs1, enum_cast<uint32_t>(VectorWidth::kMask), vs3, 0x27);
}
1815 
// VLE8FF.V: unit-stride fault-only-first load of 8-bit elements (lumop 0b10000);
// per the RVV spec, a trap on any element after the first truncates vl instead.
// Note: emitted unmasked only (no VM parameter, unlike VLe8).
void Riscv64Assembler::VLe8ff(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1821 
// VLE16FF.V: unit-stride fault-only-first load of 16-bit elements (unmasked).
void Riscv64Assembler::VLe16ff(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1827 
// VLE32FF.V: unit-stride fault-only-first load of 32-bit elements (unmasked).
void Riscv64Assembler::VLe32ff(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1833 
// VLE64FF.V: unit-stride fault-only-first load of 64-bit elements (unmasked).
void Riscv64Assembler::VLe64ff(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b10000, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1839 
// VLSE8.V: strided load of 8-bit elements; byte stride in rs2, base address in rs1.
void Riscv64Assembler::VLse8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1846 
// VLSE16.V: strided load of 16-bit elements; byte stride in rs2, base address in rs1.
void Riscv64Assembler::VLse16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1853 
// VLSE32.V: strided load of 32-bit elements; byte stride in rs2, base address in rs1.
void Riscv64Assembler::VLse32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1860 
// VLSE64.V: strided load of 64-bit elements; byte stride in rs2, base address in rs1.
void Riscv64Assembler::VLse64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1867 
// VSSE8.V: strided store of 8-bit elements from vs3; byte stride in rs2, base in rs1.
void Riscv64Assembler::VSse8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
1873 
// VSSE16.V: strided store of 16-bit elements from vs3; byte stride in rs2, base in rs1.
void Riscv64Assembler::VSse16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
1879 
// VSSE32.V: strided store of 32-bit elements from vs3; byte stride in rs2, base in rs1.
void Riscv64Assembler::VSse32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
1885 
// VSSE64.V: strided store of 64-bit elements from vs3; byte stride in rs2, base in rs1.
void Riscv64Assembler::VSse64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kStrided, vm);
  EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
1891 
// VLOXEI8.V: indexed-ordered load; 8-bit byte offsets come from vs2, base in rs1.
void Riscv64Assembler::VLoxei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1898 
// VLOXEI16.V: indexed-ordered load; 16-bit byte offsets come from vs2, base in rs1.
void Riscv64Assembler::VLoxei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1905 
// VLOXEI32.V: indexed-ordered load; 32-bit byte offsets come from vs2, base in rs1.
void Riscv64Assembler::VLoxei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1912 
// VLOXEI64.V: indexed-ordered load; 64-bit byte offsets come from vs2, base in rs1.
void Riscv64Assembler::VLoxei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1919 
// VLUXEI8.V: indexed-unordered load; 8-bit byte offsets come from vs2, base in rs1.
void Riscv64Assembler::VLuxei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
1926 
// VLUXEI16.V: indexed-unordered load; 16-bit byte offsets come from vs2, base in rs1.
void Riscv64Assembler::VLuxei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
1933 
// VLUXEI32.V: indexed-unordered load; 32-bit byte offsets come from vs2, base in rs1.
void Riscv64Assembler::VLuxei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
1940 
// VLUXEI64.V: indexed-unordered load; 64-bit byte offsets come from vs2, base in rs1.
void Riscv64Assembler::VLuxei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
1947 
// VSOXEI8.V: indexed-ordered store of vs3; 8-bit byte offsets from vs2, base in rs1.
void Riscv64Assembler::VSoxei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
1953 
// VSOXEI16.V: indexed-ordered store of vs3; 16-bit byte offsets from vs2, base in rs1.
void Riscv64Assembler::VSoxei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
1959 
// VSOXEI32.V: indexed-ordered store of vs3; 32-bit byte offsets from vs2, base in rs1.
void Riscv64Assembler::VSoxei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
1965 
// VSOXEI64.V: indexed-ordered store of vs3; 64-bit byte offsets from vs2, base in rs1.
void Riscv64Assembler::VSoxei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
1971 
// VSUXEI8.V: indexed-unordered store of vs3; 8-bit byte offsets from vs2, base in rs1.
void Riscv64Assembler::VSuxei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
1977 
// VSUXEI16.V: indexed-unordered store of vs3; 16-bit byte offsets from vs2, base in rs1.
void Riscv64Assembler::VSuxei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
1983 
// VSUXEI32.V: indexed-unordered store of vs3; 32-bit byte offsets from vs2, base in rs1.
void Riscv64Assembler::VSuxei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
1989 
// VSUXEI64.V: indexed-unordered store of vs3; 64-bit byte offsets from vs2, base in rs1.
void Riscv64Assembler::VSuxei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kIndexedUnordered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
1995 
// VLSEG2E8.V: unit-stride segment load of 2-field records of 8-bit elements into
// the register group starting at vd (segment count encoded via Nf::k2 in funct7).
void Riscv64Assembler::VLseg2e8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
2002 
// VLSEG2E16.V: unit-stride segment load, 2 fields of 16-bit elements.
void Riscv64Assembler::VLseg2e16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
2009 
// VLSEG2E32.V: unit-stride segment load, 2 fields of 32-bit elements.
void Riscv64Assembler::VLseg2e32(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
2016 
// VLSEG2E64.V: unit-stride segment load, 2 fields of 64-bit elements.
void Riscv64Assembler::VLseg2e64(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
2023 
// VLSEG3E8.V: unit-stride segment load, 3 fields of 8-bit elements.
void Riscv64Assembler::VLseg3e8(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
2030 
// VLSEG3E16.V: unit-stride segment load, 3 fields of 16-bit elements.
void Riscv64Assembler::VLseg3e16(VRegister vd, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);  // Masked op must not overwrite the mask in v0.
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
  EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
2037 
VLseg3e32(VRegister vd,XRegister rs1,VM vm)2038 void Riscv64Assembler::VLseg3e32(VRegister vd, XRegister rs1, VM vm) {
2039   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2040   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2041   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2042   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2043 }
2044 
VLseg3e64(VRegister vd,XRegister rs1,VM vm)2045 void Riscv64Assembler::VLseg3e64(VRegister vd, XRegister rs1, VM vm) {
2046   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2047   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2048   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2049   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2050 }
2051 
VLseg4e8(VRegister vd,XRegister rs1,VM vm)2052 void Riscv64Assembler::VLseg4e8(VRegister vd, XRegister rs1, VM vm) {
2053   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2054   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2055   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2056   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2057 }
2058 
VLseg4e16(VRegister vd,XRegister rs1,VM vm)2059 void Riscv64Assembler::VLseg4e16(VRegister vd, XRegister rs1, VM vm) {
2060   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2061   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2062   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2063   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2064 }
2065 
VLseg4e32(VRegister vd,XRegister rs1,VM vm)2066 void Riscv64Assembler::VLseg4e32(VRegister vd, XRegister rs1, VM vm) {
2067   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2068   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2069   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2070   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2071 }
2072 
VLseg4e64(VRegister vd,XRegister rs1,VM vm)2073 void Riscv64Assembler::VLseg4e64(VRegister vd, XRegister rs1, VM vm) {
2074   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2075   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2076   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2077   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2078 }
2079 
VLseg5e8(VRegister vd,XRegister rs1,VM vm)2080 void Riscv64Assembler::VLseg5e8(VRegister vd, XRegister rs1, VM vm) {
2081   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2082   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2083   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2084   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2085 }
2086 
VLseg5e16(VRegister vd,XRegister rs1,VM vm)2087 void Riscv64Assembler::VLseg5e16(VRegister vd, XRegister rs1, VM vm) {
2088   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2089   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2090   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2091   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2092 }
2093 
VLseg5e32(VRegister vd,XRegister rs1,VM vm)2094 void Riscv64Assembler::VLseg5e32(VRegister vd, XRegister rs1, VM vm) {
2095   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2096   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2097   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2098   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2099 }
2100 
VLseg5e64(VRegister vd,XRegister rs1,VM vm)2101 void Riscv64Assembler::VLseg5e64(VRegister vd, XRegister rs1, VM vm) {
2102   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2103   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2104   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2105   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2106 }
2107 
VLseg6e8(VRegister vd,XRegister rs1,VM vm)2108 void Riscv64Assembler::VLseg6e8(VRegister vd, XRegister rs1, VM vm) {
2109   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2110   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2111   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2112   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2113 }
2114 
VLseg6e16(VRegister vd,XRegister rs1,VM vm)2115 void Riscv64Assembler::VLseg6e16(VRegister vd, XRegister rs1, VM vm) {
2116   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2117   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2118   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2119   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2120 }
2121 
VLseg6e32(VRegister vd,XRegister rs1,VM vm)2122 void Riscv64Assembler::VLseg6e32(VRegister vd, XRegister rs1, VM vm) {
2123   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2124   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2125   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2126   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2127 }
2128 
VLseg6e64(VRegister vd,XRegister rs1,VM vm)2129 void Riscv64Assembler::VLseg6e64(VRegister vd, XRegister rs1, VM vm) {
2130   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2131   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2132   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2133   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2134 }
2135 
VLseg7e8(VRegister vd,XRegister rs1,VM vm)2136 void Riscv64Assembler::VLseg7e8(VRegister vd, XRegister rs1, VM vm) {
2137   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2138   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2139   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2140   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2141 }
2142 
VLseg7e16(VRegister vd,XRegister rs1,VM vm)2143 void Riscv64Assembler::VLseg7e16(VRegister vd, XRegister rs1, VM vm) {
2144   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2145   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2146   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2147   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2148 }
2149 
VLseg7e32(VRegister vd,XRegister rs1,VM vm)2150 void Riscv64Assembler::VLseg7e32(VRegister vd, XRegister rs1, VM vm) {
2151   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2152   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2153   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2154   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2155 }
2156 
VLseg7e64(VRegister vd,XRegister rs1,VM vm)2157 void Riscv64Assembler::VLseg7e64(VRegister vd, XRegister rs1, VM vm) {
2158   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2159   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2160   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2161   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2162 }
2163 
VLseg8e8(VRegister vd,XRegister rs1,VM vm)2164 void Riscv64Assembler::VLseg8e8(VRegister vd, XRegister rs1, VM vm) {
2165   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2166   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2167   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2168   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2169 }
2170 
VLseg8e16(VRegister vd,XRegister rs1,VM vm)2171 void Riscv64Assembler::VLseg8e16(VRegister vd, XRegister rs1, VM vm) {
2172   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2173   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2174   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2175   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2176 }
2177 
VLseg8e32(VRegister vd,XRegister rs1,VM vm)2178 void Riscv64Assembler::VLseg8e32(VRegister vd, XRegister rs1, VM vm) {
2179   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2180   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2181   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2182   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2183 }
2184 
VLseg8e64(VRegister vd,XRegister rs1,VM vm)2185 void Riscv64Assembler::VLseg8e64(VRegister vd, XRegister rs1, VM vm) {
2186   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2187   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2188   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2189   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2190 }
2191 
VSseg2e8(VRegister vs3,XRegister rs1,VM vm)2192 void Riscv64Assembler::VSseg2e8(VRegister vs3, XRegister rs1, VM vm) {
2193   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2194   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
2195   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2196 }
2197 
VSseg2e16(VRegister vs3,XRegister rs1,VM vm)2198 void Riscv64Assembler::VSseg2e16(VRegister vs3, XRegister rs1, VM vm) {
2199   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2200   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
2201   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2202 }
2203 
VSseg2e32(VRegister vs3,XRegister rs1,VM vm)2204 void Riscv64Assembler::VSseg2e32(VRegister vs3, XRegister rs1, VM vm) {
2205   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2206   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
2207   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2208 }
2209 
VSseg2e64(VRegister vs3,XRegister rs1,VM vm)2210 void Riscv64Assembler::VSseg2e64(VRegister vs3, XRegister rs1, VM vm) {
2211   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2212   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
2213   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2214 }
2215 
VSseg3e8(VRegister vs3,XRegister rs1,VM vm)2216 void Riscv64Assembler::VSseg3e8(VRegister vs3, XRegister rs1, VM vm) {
2217   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2218   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2219   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2220 }
2221 
VSseg3e16(VRegister vs3,XRegister rs1,VM vm)2222 void Riscv64Assembler::VSseg3e16(VRegister vs3, XRegister rs1, VM vm) {
2223   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2224   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2225   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2226 }
2227 
VSseg3e32(VRegister vs3,XRegister rs1,VM vm)2228 void Riscv64Assembler::VSseg3e32(VRegister vs3, XRegister rs1, VM vm) {
2229   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2230   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2231   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2232 }
2233 
VSseg3e64(VRegister vs3,XRegister rs1,VM vm)2234 void Riscv64Assembler::VSseg3e64(VRegister vs3, XRegister rs1, VM vm) {
2235   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2236   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2237   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2238 }
2239 
VSseg4e8(VRegister vs3,XRegister rs1,VM vm)2240 void Riscv64Assembler::VSseg4e8(VRegister vs3, XRegister rs1, VM vm) {
2241   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2242   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2243   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2244 }
2245 
VSseg4e16(VRegister vs3,XRegister rs1,VM vm)2246 void Riscv64Assembler::VSseg4e16(VRegister vs3, XRegister rs1, VM vm) {
2247   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2248   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2249   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2250 }
2251 
VSseg4e32(VRegister vs3,XRegister rs1,VM vm)2252 void Riscv64Assembler::VSseg4e32(VRegister vs3, XRegister rs1, VM vm) {
2253   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2254   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2255   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2256 }
2257 
VSseg4e64(VRegister vs3,XRegister rs1,VM vm)2258 void Riscv64Assembler::VSseg4e64(VRegister vs3, XRegister rs1, VM vm) {
2259   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2260   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2261   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2262 }
2263 
VSseg5e8(VRegister vs3,XRegister rs1,VM vm)2264 void Riscv64Assembler::VSseg5e8(VRegister vs3, XRegister rs1, VM vm) {
2265   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2266   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2267   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2268 }
2269 
VSseg5e16(VRegister vs3,XRegister rs1,VM vm)2270 void Riscv64Assembler::VSseg5e16(VRegister vs3, XRegister rs1, VM vm) {
2271   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2272   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2273   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2274 }
2275 
VSseg5e32(VRegister vs3,XRegister rs1,VM vm)2276 void Riscv64Assembler::VSseg5e32(VRegister vs3, XRegister rs1, VM vm) {
2277   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2278   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2279   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2280 }
2281 
VSseg5e64(VRegister vs3,XRegister rs1,VM vm)2282 void Riscv64Assembler::VSseg5e64(VRegister vs3, XRegister rs1, VM vm) {
2283   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2284   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2285   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2286 }
2287 
VSseg6e8(VRegister vs3,XRegister rs1,VM vm)2288 void Riscv64Assembler::VSseg6e8(VRegister vs3, XRegister rs1, VM vm) {
2289   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2290   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2291   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2292 }
2293 
VSseg6e16(VRegister vs3,XRegister rs1,VM vm)2294 void Riscv64Assembler::VSseg6e16(VRegister vs3, XRegister rs1, VM vm) {
2295   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2296   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2297   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2298 }
2299 
VSseg6e32(VRegister vs3,XRegister rs1,VM vm)2300 void Riscv64Assembler::VSseg6e32(VRegister vs3, XRegister rs1, VM vm) {
2301   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2302   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2303   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2304 }
2305 
VSseg6e64(VRegister vs3,XRegister rs1,VM vm)2306 void Riscv64Assembler::VSseg6e64(VRegister vs3, XRegister rs1, VM vm) {
2307   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2308   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2309   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2310 }
2311 
VSseg7e8(VRegister vs3,XRegister rs1,VM vm)2312 void Riscv64Assembler::VSseg7e8(VRegister vs3, XRegister rs1, VM vm) {
2313   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2314   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2315   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2316 }
2317 
VSseg7e16(VRegister vs3,XRegister rs1,VM vm)2318 void Riscv64Assembler::VSseg7e16(VRegister vs3, XRegister rs1, VM vm) {
2319   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2320   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2321   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2322 }
2323 
VSseg7e32(VRegister vs3,XRegister rs1,VM vm)2324 void Riscv64Assembler::VSseg7e32(VRegister vs3, XRegister rs1, VM vm) {
2325   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2326   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2327   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2328 }
2329 
VSseg7e64(VRegister vs3,XRegister rs1,VM vm)2330 void Riscv64Assembler::VSseg7e64(VRegister vs3, XRegister rs1, VM vm) {
2331   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2332   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2333   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2334 }
2335 
VSseg8e8(VRegister vs3,XRegister rs1,VM vm)2336 void Riscv64Assembler::VSseg8e8(VRegister vs3, XRegister rs1, VM vm) {
2337   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2338   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2339   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2340 }
2341 
VSseg8e16(VRegister vs3,XRegister rs1,VM vm)2342 void Riscv64Assembler::VSseg8e16(VRegister vs3, XRegister rs1, VM vm) {
2343   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2344   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2345   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2346 }
2347 
VSseg8e32(VRegister vs3,XRegister rs1,VM vm)2348 void Riscv64Assembler::VSseg8e32(VRegister vs3, XRegister rs1, VM vm) {
2349   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2350   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2351   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2352 }
2353 
VSseg8e64(VRegister vs3,XRegister rs1,VM vm)2354 void Riscv64Assembler::VSseg8e64(VRegister vs3, XRegister rs1, VM vm) {
2355   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2356   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2357   EmitR(funct7, 0b00000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2358 }
2359 
VLseg2e8ff(VRegister vd,XRegister rs1,VM vm)2360 void Riscv64Assembler::VLseg2e8ff(VRegister vd, XRegister rs1, VM vm) {
2361   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2362   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2363   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
2364   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2365 }
2366 
VLseg2e16ff(VRegister vd,XRegister rs1,VM vm)2367 void Riscv64Assembler::VLseg2e16ff(VRegister vd, XRegister rs1, VM vm) {
2368   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2369   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2370   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
2371   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2372 }
2373 
VLseg2e32ff(VRegister vd,XRegister rs1,VM vm)2374 void Riscv64Assembler::VLseg2e32ff(VRegister vd, XRegister rs1, VM vm) {
2375   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2376   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2377   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
2378   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2379 }
2380 
VLseg2e64ff(VRegister vd,XRegister rs1,VM vm)2381 void Riscv64Assembler::VLseg2e64ff(VRegister vd, XRegister rs1, VM vm) {
2382   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2383   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2384   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, vm);
2385   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2386 }
2387 
VLseg3e8ff(VRegister vd,XRegister rs1,VM vm)2388 void Riscv64Assembler::VLseg3e8ff(VRegister vd, XRegister rs1, VM vm) {
2389   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2390   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2391   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2392   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2393 }
2394 
VLseg3e16ff(VRegister vd,XRegister rs1,VM vm)2395 void Riscv64Assembler::VLseg3e16ff(VRegister vd, XRegister rs1, VM vm) {
2396   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2397   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2398   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2399   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2400 }
2401 
VLseg3e32ff(VRegister vd,XRegister rs1,VM vm)2402 void Riscv64Assembler::VLseg3e32ff(VRegister vd, XRegister rs1, VM vm) {
2403   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2404   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2405   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2406   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2407 }
2408 
VLseg3e64ff(VRegister vd,XRegister rs1,VM vm)2409 void Riscv64Assembler::VLseg3e64ff(VRegister vd, XRegister rs1, VM vm) {
2410   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2411   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2412   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kUnitStride, vm);
2413   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2414 }
2415 
VLseg4e8ff(VRegister vd,XRegister rs1,VM vm)2416 void Riscv64Assembler::VLseg4e8ff(VRegister vd, XRegister rs1, VM vm) {
2417   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2418   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2419   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2420   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2421 }
2422 
VLseg4e16ff(VRegister vd,XRegister rs1,VM vm)2423 void Riscv64Assembler::VLseg4e16ff(VRegister vd, XRegister rs1, VM vm) {
2424   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2425   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2426   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2427   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2428 }
2429 
VLseg4e32ff(VRegister vd,XRegister rs1,VM vm)2430 void Riscv64Assembler::VLseg4e32ff(VRegister vd, XRegister rs1, VM vm) {
2431   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2432   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2433   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2434   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2435 }
2436 
VLseg4e64ff(VRegister vd,XRegister rs1,VM vm)2437 void Riscv64Assembler::VLseg4e64ff(VRegister vd, XRegister rs1, VM vm) {
2438   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2439   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2440   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, vm);
2441   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2442 }
2443 
VLseg5e8ff(VRegister vd,XRegister rs1,VM vm)2444 void Riscv64Assembler::VLseg5e8ff(VRegister vd, XRegister rs1, VM vm) {
2445   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2446   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2447   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2448   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2449 }
2450 
VLseg5e16ff(VRegister vd,XRegister rs1,VM vm)2451 void Riscv64Assembler::VLseg5e16ff(VRegister vd, XRegister rs1, VM vm) {
2452   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2453   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2454   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2455   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2456 }
2457 
VLseg5e32ff(VRegister vd,XRegister rs1,VM vm)2458 void Riscv64Assembler::VLseg5e32ff(VRegister vd, XRegister rs1, VM vm) {
2459   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2460   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2461   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2462   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2463 }
2464 
VLseg5e64ff(VRegister vd,XRegister rs1,VM vm)2465 void Riscv64Assembler::VLseg5e64ff(VRegister vd, XRegister rs1, VM vm) {
2466   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2467   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2468   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kUnitStride, vm);
2469   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2470 }
2471 
VLseg6e8ff(VRegister vd,XRegister rs1,VM vm)2472 void Riscv64Assembler::VLseg6e8ff(VRegister vd, XRegister rs1, VM vm) {
2473   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2474   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2475   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2476   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2477 }
2478 
VLseg6e16ff(VRegister vd,XRegister rs1,VM vm)2479 void Riscv64Assembler::VLseg6e16ff(VRegister vd, XRegister rs1, VM vm) {
2480   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2481   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2482   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2483   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2484 }
2485 
VLseg6e32ff(VRegister vd,XRegister rs1,VM vm)2486 void Riscv64Assembler::VLseg6e32ff(VRegister vd, XRegister rs1, VM vm) {
2487   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2488   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2489   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2490   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2491 }
2492 
VLseg6e64ff(VRegister vd,XRegister rs1,VM vm)2493 void Riscv64Assembler::VLseg6e64ff(VRegister vd, XRegister rs1, VM vm) {
2494   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2495   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2496   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kUnitStride, vm);
2497   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2498 }
2499 
VLseg7e8ff(VRegister vd,XRegister rs1,VM vm)2500 void Riscv64Assembler::VLseg7e8ff(VRegister vd, XRegister rs1, VM vm) {
2501   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2502   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2503   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2504   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2505 }
2506 
VLseg7e16ff(VRegister vd,XRegister rs1,VM vm)2507 void Riscv64Assembler::VLseg7e16ff(VRegister vd, XRegister rs1, VM vm) {
2508   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2509   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2510   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2511   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2512 }
2513 
VLseg7e32ff(VRegister vd,XRegister rs1,VM vm)2514 void Riscv64Assembler::VLseg7e32ff(VRegister vd, XRegister rs1, VM vm) {
2515   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2516   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2517   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2518   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2519 }
2520 
VLseg7e64ff(VRegister vd,XRegister rs1,VM vm)2521 void Riscv64Assembler::VLseg7e64ff(VRegister vd, XRegister rs1, VM vm) {
2522   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2523   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2524   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kUnitStride, vm);
2525   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2526 }
2527 
VLseg8e8ff(VRegister vd,XRegister rs1,VM vm)2528 void Riscv64Assembler::VLseg8e8ff(VRegister vd, XRegister rs1, VM vm) {
2529   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2530   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2531   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2532   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2533 }
2534 
VLseg8e16ff(VRegister vd,XRegister rs1,VM vm)2535 void Riscv64Assembler::VLseg8e16ff(VRegister vd, XRegister rs1, VM vm) {
2536   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2537   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2538   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2539   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2540 }
2541 
VLseg8e32ff(VRegister vd,XRegister rs1,VM vm)2542 void Riscv64Assembler::VLseg8e32ff(VRegister vd, XRegister rs1, VM vm) {
2543   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2544   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2545   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2546   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2547 }
2548 
VLseg8e64ff(VRegister vd,XRegister rs1,VM vm)2549 void Riscv64Assembler::VLseg8e64ff(VRegister vd, XRegister rs1, VM vm) {
2550   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2551   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2552   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, vm);
2553   EmitR(funct7, 0b10000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2554 }
2555 
VLsseg2e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2556 void Riscv64Assembler::VLsseg2e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2557   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2558   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2559   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2560   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2561 }
2562 
VLsseg2e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2563 void Riscv64Assembler::VLsseg2e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2564   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2565   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2566   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2567   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2568 }
2569 
VLsseg2e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2570 void Riscv64Assembler::VLsseg2e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2571   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2572   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2573   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2574   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2575 }
2576 
VLsseg2e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2577 void Riscv64Assembler::VLsseg2e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2578   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2579   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2580   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2581   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2582 }
2583 
VLsseg3e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2584 void Riscv64Assembler::VLsseg3e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2585   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2586   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2587   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2588   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2589 }
2590 
VLsseg3e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2591 void Riscv64Assembler::VLsseg3e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2592   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2593   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2594   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2595   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2596 }
2597 
VLsseg3e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2598 void Riscv64Assembler::VLsseg3e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2599   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2600   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2601   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2602   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2603 }
2604 
VLsseg3e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2605 void Riscv64Assembler::VLsseg3e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2606   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2607   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2608   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2609   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2610 }
2611 
VLsseg4e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2612 void Riscv64Assembler::VLsseg4e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2613   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2614   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2615   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2616   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2617 }
2618 
VLsseg4e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2619 void Riscv64Assembler::VLsseg4e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2620   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2621   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2622   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2623   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2624 }
2625 
VLsseg4e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2626 void Riscv64Assembler::VLsseg4e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2627   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2628   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2629   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2630   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2631 }
2632 
VLsseg4e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2633 void Riscv64Assembler::VLsseg4e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2634   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2635   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2636   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2637   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2638 }
2639 
VLsseg5e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2640 void Riscv64Assembler::VLsseg5e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2641   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2642   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2643   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2644   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2645 }
2646 
VLsseg5e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2647 void Riscv64Assembler::VLsseg5e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2648   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2649   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2650   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2651   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2652 }
2653 
VLsseg5e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2654 void Riscv64Assembler::VLsseg5e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2655   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2656   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2657   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2658   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2659 }
2660 
VLsseg5e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2661 void Riscv64Assembler::VLsseg5e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2662   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2663   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2664   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2665   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2666 }
2667 
VLsseg6e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2668 void Riscv64Assembler::VLsseg6e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2669   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2670   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2671   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2672   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2673 }
2674 
VLsseg6e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2675 void Riscv64Assembler::VLsseg6e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2676   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2677   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2678   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2679   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2680 }
2681 
VLsseg6e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2682 void Riscv64Assembler::VLsseg6e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2683   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2684   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2685   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2686   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2687 }
2688 
VLsseg6e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2689 void Riscv64Assembler::VLsseg6e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2690   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2691   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2692   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2693   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2694 }
2695 
VLsseg7e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2696 void Riscv64Assembler::VLsseg7e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2697   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2698   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2699   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2700   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2701 }
2702 
VLsseg7e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2703 void Riscv64Assembler::VLsseg7e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2704   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2705   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2706   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2707   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2708 }
2709 
VLsseg7e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2710 void Riscv64Assembler::VLsseg7e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2711   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2712   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2713   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2714   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2715 }
2716 
VLsseg7e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2717 void Riscv64Assembler::VLsseg7e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2718   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2719   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2720   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2721   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2722 }
2723 
VLsseg8e8(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2724 void Riscv64Assembler::VLsseg8e8(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2725   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2726   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2727   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2728   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2729 }
2730 
VLsseg8e16(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2731 void Riscv64Assembler::VLsseg8e16(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2732   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2733   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2734   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2735   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2736 }
2737 
VLsseg8e32(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2738 void Riscv64Assembler::VLsseg8e32(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2739   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2740   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2741   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2742   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2743 }
2744 
VLsseg8e64(VRegister vd,XRegister rs1,XRegister rs2,VM vm)2745 void Riscv64Assembler::VLsseg8e64(VRegister vd, XRegister rs1, XRegister rs2, VM vm) {
2746   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2747   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2748   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2749   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2750 }
2751 
VSsseg2e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2752 void Riscv64Assembler::VSsseg2e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2753   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2754   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2755   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2756 }
2757 
VSsseg2e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2758 void Riscv64Assembler::VSsseg2e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2759   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2760   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2761   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2762 }
2763 
VSsseg2e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2764 void Riscv64Assembler::VSsseg2e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2765   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2766   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2767   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2768 }
2769 
VSsseg2e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2770 void Riscv64Assembler::VSsseg2e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2771   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2772   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kStrided, vm);
2773   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2774 }
2775 
VSsseg3e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2776 void Riscv64Assembler::VSsseg3e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2777   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2778   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2779   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2780 }
2781 
VSsseg3e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2782 void Riscv64Assembler::VSsseg3e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2783   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2784   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2785   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2786 }
2787 
VSsseg3e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2788 void Riscv64Assembler::VSsseg3e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2789   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2790   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2791   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2792 }
2793 
VSsseg3e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2794 void Riscv64Assembler::VSsseg3e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2795   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2796   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kStrided, vm);
2797   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2798 }
2799 
VSsseg4e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2800 void Riscv64Assembler::VSsseg4e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2801   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2802   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2803   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2804 }
2805 
VSsseg4e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2806 void Riscv64Assembler::VSsseg4e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2807   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2808   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2809   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2810 }
2811 
VSsseg4e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2812 void Riscv64Assembler::VSsseg4e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2813   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2814   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2815   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2816 }
2817 
VSsseg4e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2818 void Riscv64Assembler::VSsseg4e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2819   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2820   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kStrided, vm);
2821   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2822 }
2823 
VSsseg5e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2824 void Riscv64Assembler::VSsseg5e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2825   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2826   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2827   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2828 }
2829 
VSsseg5e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2830 void Riscv64Assembler::VSsseg5e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2831   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2832   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2833   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2834 }
2835 
VSsseg5e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2836 void Riscv64Assembler::VSsseg5e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2837   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2838   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2839   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2840 }
2841 
VSsseg5e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2842 void Riscv64Assembler::VSsseg5e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2843   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2844   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kStrided, vm);
2845   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2846 }
2847 
VSsseg6e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2848 void Riscv64Assembler::VSsseg6e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2849   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2850   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2851   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2852 }
2853 
VSsseg6e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2854 void Riscv64Assembler::VSsseg6e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2855   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2856   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2857   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2858 }
2859 
VSsseg6e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2860 void Riscv64Assembler::VSsseg6e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2861   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2862   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2863   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2864 }
2865 
VSsseg6e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2866 void Riscv64Assembler::VSsseg6e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2867   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2868   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kStrided, vm);
2869   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2870 }
2871 
VSsseg7e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2872 void Riscv64Assembler::VSsseg7e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2873   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2874   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2875   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2876 }
2877 
VSsseg7e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2878 void Riscv64Assembler::VSsseg7e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2879   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2880   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2881   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2882 }
2883 
VSsseg7e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2884 void Riscv64Assembler::VSsseg7e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2885   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2886   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2887   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2888 }
2889 
VSsseg7e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2890 void Riscv64Assembler::VSsseg7e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2891   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2892   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kStrided, vm);
2893   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2894 }
2895 
VSsseg8e8(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2896 void Riscv64Assembler::VSsseg8e8(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2897   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2898   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2899   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
2900 }
2901 
VSsseg8e16(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2902 void Riscv64Assembler::VSsseg8e16(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2903   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2904   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2905   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
2906 }
2907 
VSsseg8e32(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2908 void Riscv64Assembler::VSsseg8e32(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2909   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2910   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2911   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
2912 }
2913 
VSsseg8e64(VRegister vs3,XRegister rs1,XRegister rs2,VM vm)2914 void Riscv64Assembler::VSsseg8e64(VRegister vs3, XRegister rs1, XRegister rs2, VM vm) {
2915   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2916   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kStrided, vm);
2917   EmitR(funct7, rs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
2918 }
2919 
VLuxseg2ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2920 void Riscv64Assembler::VLuxseg2ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2921   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2922   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2923   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
2924   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2925 }
2926 
VLuxseg2ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2927 void Riscv64Assembler::VLuxseg2ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2928   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2929   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2930   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
2931   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2932 }
2933 
VLuxseg2ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2934 void Riscv64Assembler::VLuxseg2ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2935   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2936   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2937   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
2938   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2939 }
2940 
VLuxseg2ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2941 void Riscv64Assembler::VLuxseg2ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2942   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2943   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2944   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
2945   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2946 }
2947 
VLuxseg3ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2948 void Riscv64Assembler::VLuxseg3ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2949   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2950   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2951   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
2952   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2953 }
2954 
VLuxseg3ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2955 void Riscv64Assembler::VLuxseg3ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2956   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2957   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2958   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
2959   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2960 }
2961 
VLuxseg3ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2962 void Riscv64Assembler::VLuxseg3ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2963   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2964   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2965   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
2966   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2967 }
2968 
VLuxseg3ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2969 void Riscv64Assembler::VLuxseg3ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2970   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2971   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2972   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
2973   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
2974 }
2975 
VLuxseg4ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2976 void Riscv64Assembler::VLuxseg4ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2977   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2978   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2979   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
2980   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
2981 }
2982 
VLuxseg4ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2983 void Riscv64Assembler::VLuxseg4ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2984   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2985   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2986   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
2987   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
2988 }
2989 
VLuxseg4ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2990 void Riscv64Assembler::VLuxseg4ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2991   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2992   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
2993   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
2994   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
2995 }
2996 
VLuxseg4ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)2997 void Riscv64Assembler::VLuxseg4ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
2998   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
2999   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3000   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3001   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3002 }
3003 
VLuxseg5ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3004 void Riscv64Assembler::VLuxseg5ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3005   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3006   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3007   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3008   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3009 }
3010 
VLuxseg5ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3011 void Riscv64Assembler::VLuxseg5ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3012   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3013   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3014   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3015   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3016 }
3017 
VLuxseg5ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3018 void Riscv64Assembler::VLuxseg5ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3019   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3020   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3021   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3022   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3023 }
3024 
VLuxseg5ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3025 void Riscv64Assembler::VLuxseg5ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3026   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3027   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3028   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3029   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3030 }
3031 
VLuxseg6ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3032 void Riscv64Assembler::VLuxseg6ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3033   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3034   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3035   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3036   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3037 }
3038 
VLuxseg6ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3039 void Riscv64Assembler::VLuxseg6ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3040   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3041   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3042   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3043   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3044 }
3045 
VLuxseg6ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3046 void Riscv64Assembler::VLuxseg6ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3047   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3048   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3049   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3050   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3051 }
3052 
VLuxseg6ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3053 void Riscv64Assembler::VLuxseg6ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3054   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3055   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3056   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3057   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3058 }
3059 
VLuxseg7ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3060 void Riscv64Assembler::VLuxseg7ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3061   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3062   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3063   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3064   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3065 }
3066 
VLuxseg7ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3067 void Riscv64Assembler::VLuxseg7ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3068   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3069   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3070   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3071   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3072 }
3073 
VLuxseg7ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3074 void Riscv64Assembler::VLuxseg7ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3075   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3076   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3077   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3078   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3079 }
3080 
VLuxseg7ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3081 void Riscv64Assembler::VLuxseg7ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3082   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3083   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3084   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3085   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3086 }
3087 
VLuxseg8ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3088 void Riscv64Assembler::VLuxseg8ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3089   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3090   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3091   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3092   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3093 }
3094 
VLuxseg8ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3095 void Riscv64Assembler::VLuxseg8ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3096   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3097   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3098   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3099   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3100 }
3101 
VLuxseg8ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3102 void Riscv64Assembler::VLuxseg8ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3103   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3104   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3105   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3106   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3107 }
3108 
VLuxseg8ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3109 void Riscv64Assembler::VLuxseg8ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3110   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3111   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3112   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3113   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3114 }
3115 
VSuxseg2ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3116 void Riscv64Assembler::VSuxseg2ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3117   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3118   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
3119   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3120 }
3121 
VSuxseg2ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3122 void Riscv64Assembler::VSuxseg2ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3123   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3124   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
3125   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3126 }
3127 
VSuxseg2ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3128 void Riscv64Assembler::VSuxseg2ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3129   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3130   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
3131   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3132 }
3133 
VSuxseg2ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3134 void Riscv64Assembler::VSuxseg2ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3135   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3136   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedUnordered, vm);
3137   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3138 }
3139 
VSuxseg3ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3140 void Riscv64Assembler::VSuxseg3ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3141   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3142   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3143   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3144 }
3145 
VSuxseg3ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3146 void Riscv64Assembler::VSuxseg3ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3147   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3148   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3149   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3150 }
3151 
VSuxseg3ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3152 void Riscv64Assembler::VSuxseg3ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3153   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3154   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3155   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3156 }
3157 
VSuxseg3ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3158 void Riscv64Assembler::VSuxseg3ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3159   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3160   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedUnordered, vm);
3161   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3162 }
3163 
VSuxseg4ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3164 void Riscv64Assembler::VSuxseg4ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3165   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3166   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3167   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3168 }
3169 
VSuxseg4ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3170 void Riscv64Assembler::VSuxseg4ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3171   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3172   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3173   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3174 }
3175 
VSuxseg4ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3176 void Riscv64Assembler::VSuxseg4ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3177   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3178   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3179   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3180 }
3181 
VSuxseg4ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3182 void Riscv64Assembler::VSuxseg4ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3183   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3184   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedUnordered, vm);
3185   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3186 }
3187 
VSuxseg5ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3188 void Riscv64Assembler::VSuxseg5ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3189   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3190   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3191   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3192 }
3193 
VSuxseg5ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3194 void Riscv64Assembler::VSuxseg5ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3195   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3196   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3197   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3198 }
3199 
VSuxseg5ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3200 void Riscv64Assembler::VSuxseg5ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3201   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3202   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3203   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3204 }
3205 
VSuxseg5ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3206 void Riscv64Assembler::VSuxseg5ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3207   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3208   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedUnordered, vm);
3209   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3210 }
3211 
VSuxseg6ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3212 void Riscv64Assembler::VSuxseg6ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3213   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3214   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3215   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3216 }
3217 
VSuxseg6ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3218 void Riscv64Assembler::VSuxseg6ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3219   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3220   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3221   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3222 }
3223 
VSuxseg6ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3224 void Riscv64Assembler::VSuxseg6ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3225   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3226   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3227   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3228 }
3229 
VSuxseg6ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3230 void Riscv64Assembler::VSuxseg6ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3231   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3232   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedUnordered, vm);
3233   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3234 }
3235 
VSuxseg7ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3236 void Riscv64Assembler::VSuxseg7ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3237   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3238   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3239   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3240 }
3241 
VSuxseg7ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3242 void Riscv64Assembler::VSuxseg7ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3243   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3244   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3245   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3246 }
3247 
VSuxseg7ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3248 void Riscv64Assembler::VSuxseg7ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3249   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3250   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3251   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3252 }
3253 
VSuxseg7ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3254 void Riscv64Assembler::VSuxseg7ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3255   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3256   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedUnordered, vm);
3257   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3258 }
3259 
VSuxseg8ei8(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3260 void Riscv64Assembler::VSuxseg8ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3261   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3262   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3263   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
3264 }
3265 
VSuxseg8ei16(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3266 void Riscv64Assembler::VSuxseg8ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3267   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3268   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3269   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
3270 }
3271 
VSuxseg8ei32(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3272 void Riscv64Assembler::VSuxseg8ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3273   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3274   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3275   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
3276 }
3277 
VSuxseg8ei64(VRegister vs3,XRegister rs1,VRegister vs2,VM vm)3278 void Riscv64Assembler::VSuxseg8ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
3279   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3280   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedUnordered, vm);
3281   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
3282 }
3283 
VLoxseg2ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3284 void Riscv64Assembler::VLoxseg2ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3285   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3286   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3287   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3288   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3289 }
3290 
VLoxseg2ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3291 void Riscv64Assembler::VLoxseg2ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3292   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3293   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3294   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3295   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3296 }
3297 
VLoxseg2ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3298 void Riscv64Assembler::VLoxseg2ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3299   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3300   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3301   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3302   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3303 }
3304 
VLoxseg2ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3305 void Riscv64Assembler::VLoxseg2ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3306   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3307   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3308   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
3309   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3310 }
3311 
VLoxseg3ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3312 void Riscv64Assembler::VLoxseg3ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3313   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3314   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3315   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3316   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3317 }
3318 
VLoxseg3ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3319 void Riscv64Assembler::VLoxseg3ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3320   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3321   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3322   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3323   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3324 }
3325 
VLoxseg3ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3326 void Riscv64Assembler::VLoxseg3ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3327   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3328   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3329   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3330   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3331 }
3332 
VLoxseg3ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3333 void Riscv64Assembler::VLoxseg3ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3334   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3335   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3336   const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
3337   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3338 }
3339 
VLoxseg4ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3340 void Riscv64Assembler::VLoxseg4ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3341   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3342   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3343   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3344   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3345 }
3346 
VLoxseg4ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3347 void Riscv64Assembler::VLoxseg4ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3348   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3349   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3350   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3351   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3352 }
3353 
VLoxseg4ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3354 void Riscv64Assembler::VLoxseg4ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3355   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3356   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3357   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3358   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3359 }
3360 
VLoxseg4ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3361 void Riscv64Assembler::VLoxseg4ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3362   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3363   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3364   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
3365   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3366 }
3367 
VLoxseg5ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3368 void Riscv64Assembler::VLoxseg5ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3369   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3370   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3371   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3372   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3373 }
3374 
VLoxseg5ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3375 void Riscv64Assembler::VLoxseg5ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3376   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3377   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3378   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3379   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3380 }
3381 
VLoxseg5ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3382 void Riscv64Assembler::VLoxseg5ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3383   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3384   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3385   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3386   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3387 }
3388 
VLoxseg5ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3389 void Riscv64Assembler::VLoxseg5ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3390   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3391   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3392   const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
3393   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3394 }
3395 
VLoxseg6ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3396 void Riscv64Assembler::VLoxseg6ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3397   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3398   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3399   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3400   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3401 }
3402 
VLoxseg6ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3403 void Riscv64Assembler::VLoxseg6ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3404   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3405   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3406   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3407   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3408 }
3409 
VLoxseg6ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3410 void Riscv64Assembler::VLoxseg6ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3411   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3412   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3413   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3414   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3415 }
3416 
VLoxseg6ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3417 void Riscv64Assembler::VLoxseg6ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3418   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3419   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3420   const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
3421   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3422 }
3423 
VLoxseg7ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3424 void Riscv64Assembler::VLoxseg7ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3425   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3426   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3427   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3428   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3429 }
3430 
VLoxseg7ei16(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3431 void Riscv64Assembler::VLoxseg7ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3432   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3433   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3434   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3435   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
3436 }
3437 
VLoxseg7ei32(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3438 void Riscv64Assembler::VLoxseg7ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3439   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3440   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3441   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3442   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
3443 }
3444 
VLoxseg7ei64(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3445 void Riscv64Assembler::VLoxseg7ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3446   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3447   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3448   const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
3449   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
3450 }
3451 
VLoxseg8ei8(VRegister vd,XRegister rs1,VRegister vs2,VM vm)3452 void Riscv64Assembler::VLoxseg8ei8(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
3453   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3454   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3455   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
3456   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
3457 }
3458 
// vloxseg8ei16.v vd, (rs1), vs2, vm: indexed-ordered load of 8-field segments
// using 16-bit index elements from vs2.
void Riscv64Assembler::VLoxseg8ei16(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
3465 
// vloxseg8ei32.v vd, (rs1), vs2, vm: indexed-ordered load of 8-field segments
// using 32-bit index elements from vs2.
void Riscv64Assembler::VLoxseg8ei32(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
3472 
// vloxseg8ei64.v vd, (rs1), vs2, vm: indexed-ordered load of 8-field segments
// using 64-bit index elements from vs2.
void Riscv64Assembler::VLoxseg8ei64(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // A masked load must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
3479 
// vsoxseg2ei8.v vs3, (rs1), vs2, vm: indexed-ordered store of 2-field segments,
// 8-bit index elements. Stores read vs3 only, so no V0-overlap check is needed.
void Riscv64Assembler::VSoxseg2ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
3485 
// vsoxseg2ei16.v vs3, (rs1), vs2, vm: indexed-ordered store of 2-field segments,
// 16-bit index elements.
void Riscv64Assembler::VSoxseg2ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
3491 
// vsoxseg2ei32.v vs3, (rs1), vs2, vm: indexed-ordered store of 2-field segments,
// 32-bit index elements.
void Riscv64Assembler::VSoxseg2ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
3497 
// vsoxseg2ei64.v vs3, (rs1), vs2, vm: indexed-ordered store of 2-field segments,
// 64-bit index elements.
void Riscv64Assembler::VSoxseg2ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
3503 
// vsoxseg3ei8.v vs3, (rs1), vs2, vm: indexed-ordered store of 3-field segments,
// 8-bit index elements.
void Riscv64Assembler::VSoxseg3ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
3509 
// vsoxseg3ei16.v vs3, (rs1), vs2, vm: indexed-ordered store of 3-field segments,
// 16-bit index elements.
void Riscv64Assembler::VSoxseg3ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
3515 
// vsoxseg3ei32.v vs3, (rs1), vs2, vm: indexed-ordered store of 3-field segments,
// 32-bit index elements.
void Riscv64Assembler::VSoxseg3ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
3521 
// vsoxseg3ei64.v vs3, (rs1), vs2, vm: indexed-ordered store of 3-field segments,
// 64-bit index elements.
void Riscv64Assembler::VSoxseg3ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k3, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
3527 
// vsoxseg4ei8.v vs3, (rs1), vs2, vm: indexed-ordered store of 4-field segments,
// 8-bit index elements.
void Riscv64Assembler::VSoxseg4ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
3533 
// vsoxseg4ei16.v vs3, (rs1), vs2, vm: indexed-ordered store of 4-field segments,
// 16-bit index elements.
void Riscv64Assembler::VSoxseg4ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
3539 
// vsoxseg4ei32.v vs3, (rs1), vs2, vm: indexed-ordered store of 4-field segments,
// 32-bit index elements.
void Riscv64Assembler::VSoxseg4ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
3545 
// vsoxseg4ei64.v vs3, (rs1), vs2, vm: indexed-ordered store of 4-field segments,
// 64-bit index elements.
void Riscv64Assembler::VSoxseg4ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
3551 
// vsoxseg5ei8.v vs3, (rs1), vs2, vm: indexed-ordered store of 5-field segments,
// 8-bit index elements.
void Riscv64Assembler::VSoxseg5ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
3557 
// vsoxseg5ei16.v vs3, (rs1), vs2, vm: indexed-ordered store of 5-field segments,
// 16-bit index elements.
void Riscv64Assembler::VSoxseg5ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
3563 
// vsoxseg5ei32.v vs3, (rs1), vs2, vm: indexed-ordered store of 5-field segments,
// 32-bit index elements.
void Riscv64Assembler::VSoxseg5ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
3569 
// vsoxseg5ei64.v vs3, (rs1), vs2, vm: indexed-ordered store of 5-field segments,
// 64-bit index elements.
void Riscv64Assembler::VSoxseg5ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k5, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
3575 
// vsoxseg6ei8.v vs3, (rs1), vs2, vm: indexed-ordered store of 6-field segments,
// 8-bit index elements.
void Riscv64Assembler::VSoxseg6ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
3581 
// vsoxseg6ei16.v vs3, (rs1), vs2, vm: indexed-ordered store of 6-field segments,
// 16-bit index elements.
void Riscv64Assembler::VSoxseg6ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
3587 
// vsoxseg6ei32.v vs3, (rs1), vs2, vm: indexed-ordered store of 6-field segments,
// 32-bit index elements.
void Riscv64Assembler::VSoxseg6ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
3593 
// vsoxseg6ei64.v vs3, (rs1), vs2, vm: indexed-ordered store of 6-field segments,
// 64-bit index elements.
void Riscv64Assembler::VSoxseg6ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k6, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
3599 
// vsoxseg7ei8.v vs3, (rs1), vs2, vm: indexed-ordered store of 7-field segments,
// 8-bit index elements.
void Riscv64Assembler::VSoxseg7ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
3605 
// vsoxseg7ei16.v vs3, (rs1), vs2, vm: indexed-ordered store of 7-field segments,
// 16-bit index elements.
void Riscv64Assembler::VSoxseg7ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
3611 
// vsoxseg7ei32.v vs3, (rs1), vs2, vm: indexed-ordered store of 7-field segments,
// 32-bit index elements.
void Riscv64Assembler::VSoxseg7ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
3617 
// vsoxseg7ei64.v vs3, (rs1), vs2, vm: indexed-ordered store of 7-field segments,
// 64-bit index elements.
void Riscv64Assembler::VSoxseg7ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k7, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
3623 
// vsoxseg8ei8.v vs3, (rs1), vs2, vm: indexed-ordered store of 8-field segments,
// 8-bit index elements.
void Riscv64Assembler::VSoxseg8ei8(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k8), vs3, 0x27);
}
3629 
// vsoxseg8ei16.v vs3, (rs1), vs2, vm: indexed-ordered store of 8-field segments,
// 16-bit index elements.
void Riscv64Assembler::VSoxseg8ei16(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k16), vs3, 0x27);
}
3635 
// vsoxseg8ei32.v vs3, (rs1), vs2, vm: indexed-ordered store of 8-field segments,
// 32-bit index elements.
void Riscv64Assembler::VSoxseg8ei32(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k32), vs3, 0x27);
}
3641 
// vsoxseg8ei64.v vs3, (rs1), vs2, vm: indexed-ordered store of 8-field segments,
// 64-bit index elements.
void Riscv64Assembler::VSoxseg8ei64(VRegister vs3, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kIndexedOrdered, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VectorWidth::k64), vs3, 0x27);
}
3647 
// vl1re8.v vd, (rs1): whole-register load of 1 vector register, EEW=8.
// rs2 field 0b01000 selects the whole-register lumop encoding; always unmasked.
void Riscv64Assembler::VL1re8(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
3653 
// vl1re16.v vd, (rs1): whole-register load of 1 vector register, EEW=16.
void Riscv64Assembler::VL1re16(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
3659 
// vl1re32.v vd, (rs1): whole-register load of 1 vector register, EEW=32.
void Riscv64Assembler::VL1re32(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
3665 
// vl1re64.v vd, (rs1): whole-register load of 1 vector register, EEW=64.
void Riscv64Assembler::VL1re64(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
3671 
// vl2re8.v vd, (rs1): whole-register load of a 2-register group, EEW=8.
void Riscv64Assembler::VL2re8(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
3678 
// vl2re16.v vd, (rs1): whole-register load of a 2-register group, EEW=16.
void Riscv64Assembler::VL2re16(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
3685 
// vl2re32.v vd, (rs1): whole-register load of a 2-register group, EEW=32.
void Riscv64Assembler::VL2re32(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
3692 
// vl2re64.v vd, (rs1): whole-register load of a 2-register group, EEW=64.
void Riscv64Assembler::VL2re64(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 2), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
3699 
// vl4re8.v vd, (rs1): whole-register load of a 4-register group, EEW=8.
void Riscv64Assembler::VL4re8(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
3706 
// vl4re16.v vd, (rs1): whole-register load of a 4-register group, EEW=16.
void Riscv64Assembler::VL4re16(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
3713 
// vl4re32.v vd, (rs1): whole-register load of a 4-register group, EEW=32.
void Riscv64Assembler::VL4re32(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
3720 
// vl4re64.v vd, (rs1): whole-register load of a 4-register group, EEW=64.
void Riscv64Assembler::VL4re64(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 4), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
3727 
// vl8re8.v vd, (rs1): whole-register load of an 8-register group, EEW=8.
void Riscv64Assembler::VL8re8(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k8), vd, 0x7);
}
3734 
// vl8re16.v vd, (rs1): whole-register load of an 8-register group, EEW=16.
void Riscv64Assembler::VL8re16(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k16), vd, 0x7);
}
3741 
// vl8re32.v vd, (rs1): whole-register load of an 8-register group, EEW=32.
void Riscv64Assembler::VL8re32(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k32), vd, 0x7);
}
3748 
// vl8re64.v vd, (rs1): whole-register load of an 8-register group, EEW=64.
void Riscv64Assembler::VL8re64(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  // The destination register group must be aligned to its size.
  DCHECK_EQ((enum_cast<uint32_t>(vd) % 8), 0U);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::k64), vd, 0x7);
}
3755 
// Pseudoinstruction vl1r.v: alias for vl1re8.v.
void Riscv64Assembler::VL1r(VRegister vd, XRegister rs1) { VL1re8(vd, rs1); }
3757 
// Pseudoinstruction vl2r.v: alias for vl2re8.v.
void Riscv64Assembler::VL2r(VRegister vd, XRegister rs1) { VL2re8(vd, rs1); }
3759 
// Pseudoinstruction vl4r.v: alias for vl4re8.v.
void Riscv64Assembler::VL4r(VRegister vd, XRegister rs1) { VL4re8(vd, rs1); }
3761 
// Pseudoinstruction vl8r.v: alias for vl8re8.v.
void Riscv64Assembler::VL8r(VRegister vd, XRegister rs1) { VL8re8(vd, rs1); }
3763 
// vs1r.v vs3, (rs1): whole-register store of 1 vector register; always unmasked.
void Riscv64Assembler::VS1r(VRegister vs3, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVMemF7(Nf::k1, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
  EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
}
3769 
VS2r(VRegister vs3,XRegister rs1)3770 void Riscv64Assembler::VS2r(VRegister vs3, XRegister rs1) {
3771   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3772   const uint32_t funct7 = EncodeRVVMemF7(Nf::k2, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3773   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
3774 }
3775 
VS4r(VRegister vs3,XRegister rs1)3776 void Riscv64Assembler::VS4r(VRegister vs3, XRegister rs1) {
3777   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3778   const uint32_t funct7 = EncodeRVVMemF7(Nf::k4, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3779   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
3780 }
3781 
VS8r(VRegister vs3,XRegister rs1)3782 void Riscv64Assembler::VS8r(VRegister vs3, XRegister rs1) {
3783   AssertExtensionsEnabled(Riscv64Extension::kLoadStore, Riscv64Extension::kV);
3784   const uint32_t funct7 = EncodeRVVMemF7(Nf::k8, 0x0, MemAddressMode::kUnitStride, VM::kUnmasked);
3785   EmitR(funct7, 0b01000u, rs1, enum_cast<uint32_t>(VectorWidth::kWholeR), vs3, 0x27);
3786 }
3787 
3788 /////////////////////////////// RVV Load/Store Instructions  END //////////////////////////////
3789 
3790 /////////////////////////////// RVV Arithmetic Instructions  START ////////////////////////////
3791 
// vadd.vv vd, vs2, vs1, vm: vector-vector integer add (funct6=0b000000, OPIVV).
void Riscv64Assembler::VAdd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
3798 
// vadd.vx vd, vs2, rs1, vm: vector-scalar integer add (funct6=0b000000, OPIVX).
void Riscv64Assembler::VAdd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}
3805 
// vadd.vi vd, vs2, imm5, vm: vector-immediate integer add (funct6=0b000000, OPIVI).
void Riscv64Assembler::VAdd_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
  EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
}
3812 
// vsub.vv vd, vs2, vs1, vm: vd = vs2 - vs1 (funct6=0b000010, OPIVV).
void Riscv64Assembler::VSub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
3819 
// vsub.vx vd, vs2, rs1, vm: vd = vs2 - x[rs1] (funct6=0b000010, OPIVX).
void Riscv64Assembler::VSub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}
3826 
// vrsub.vx vd, vs2, rs1, vm: reverse subtract, vd = x[rs1] - vs2 (funct6=0b000011).
void Riscv64Assembler::VRsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}
3833 
// vrsub.vi vd, vs2, imm5, vm: reverse subtract, vd = imm5 - vs2 (funct6=0b000011).
void Riscv64Assembler::VRsub_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
  EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
}
3840 
// Pseudoinstruction vneg.v: vd = -vs2, implemented as vrsub.vx vd, vs2, x0.
void Riscv64Assembler::VNeg_v(VRegister vd, VRegister vs2) { VRsub_vx(vd, vs2, Zero); }
3842 
// vminu.vv vd, vs2, vs1, vm: unsigned element-wise minimum (funct6=0b000100).
void Riscv64Assembler::VMinu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
3849 
// vminu.vx vd, vs2, rs1, vm: unsigned minimum with scalar (funct6=0b000100).
void Riscv64Assembler::VMinu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}
3856 
// vmin.vv vd, vs2, vs1, vm: signed element-wise minimum (funct6=0b000101).
void Riscv64Assembler::VMin_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
3863 
// vmin.vx vd, vs2, rs1, vm: signed minimum with scalar (funct6=0b000101).
void Riscv64Assembler::VMin_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}
3870 
// vmaxu.vv vd, vs2, vs1, vm: unsigned element-wise maximum (funct6=0b000110).
void Riscv64Assembler::VMaxu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
3877 
// vmaxu.vx vd, vs2, rs1, vm: unsigned maximum with scalar (funct6=0b000110).
void Riscv64Assembler::VMaxu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}
3884 
// vmax.vv vd, vs2, vs1, vm: signed element-wise maximum (funct6=0b000111).
void Riscv64Assembler::VMax_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
3891 
// vmax.vx vd, vs2, rs1, vm: signed maximum with scalar (funct6=0b000111).
void Riscv64Assembler::VMax_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}
3898 
// vand.vv vd, vs2, vs1, vm: bitwise AND (funct6=0b001001, OPIVV).
void Riscv64Assembler::VAnd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
3905 
// vand.vx vd, vs2, rs1, vm: bitwise AND with scalar (funct6=0b001001, OPIVX).
void Riscv64Assembler::VAnd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}
3912 
// vand.vi vd, vs2, imm5, vm: bitwise AND with immediate (funct6=0b001001, OPIVI).
void Riscv64Assembler::VAnd_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
  EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
}
3919 
// vor.vv vd, vs2, vs1, vm: bitwise OR (funct6=0b001010, OPIVV).
void Riscv64Assembler::VOr_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
3926 
VOr_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3927 void Riscv64Assembler::VOr_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3928   AssertExtensionsEnabled(Riscv64Extension::kV);
3929   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
3930   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3931 }
3932 
// vor.vi vd, vs2, imm5, vm: bitwise OR with immediate (funct6=0b001010, OPIVI).
void Riscv64Assembler::VOr_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
  EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
}
3939 
// vxor.vv vd, vs2, vs1, vm: bitwise XOR (funct6=0b001011, OPIVV).
void Riscv64Assembler::VXor_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
}
3946 
// vxor.vx vd, vs2, rs1, vm: bitwise XOR with scalar (funct6=0b001011, OPIVX).
void Riscv64Assembler::VXor_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
}
3953 
// vxor.vi vd, vs2, imm5, vm: bitwise XOR with immediate (funct6=0b001011, OPIVI).
void Riscv64Assembler::VXor_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked op must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
  EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
}
3960 
// Pseudoinstruction vnot.v: vd = ~vs2, implemented as vxor.vi vd, vs2, -1.
void Riscv64Assembler::VNot_v(VRegister vd, VRegister vs2, VM vm) { VXor_vi(vd, vs2, -1, vm); }
3962 
VRgather_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)3963 void Riscv64Assembler::VRgather_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
3964   AssertExtensionsEnabled(Riscv64Extension::kV);
3965   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3966   DCHECK(vd != vs1);
3967   DCHECK(vd != vs2);
3968   const uint32_t funct7 = EncodeRVVF7(0b001100, vm);
3969   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
3970 }
3971 
VRgather_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3972 void Riscv64Assembler::VRgather_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3973   AssertExtensionsEnabled(Riscv64Extension::kV);
3974   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3975   DCHECK(vd != vs2);
3976   const uint32_t funct7 = EncodeRVVF7(0b001100, vm);
3977   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3978 }
3979 
VRgather_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)3980 void Riscv64Assembler::VRgather_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
3981   AssertExtensionsEnabled(Riscv64Extension::kV);
3982   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3983   DCHECK(vd != vs2);
3984   const uint32_t funct7 = EncodeRVVF7(0b001100, vm);
3985   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
3986 }
3987 
VSlideup_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)3988 void Riscv64Assembler::VSlideup_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
3989   AssertExtensionsEnabled(Riscv64Extension::kV);
3990   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3991   DCHECK(vd != vs2);
3992   const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
3993   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
3994 }
3995 
VSlideup_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)3996 void Riscv64Assembler::VSlideup_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
3997   AssertExtensionsEnabled(Riscv64Extension::kV);
3998   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
3999   DCHECK(vd != vs2);
4000   const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
4001   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4002 }
4003 
VRgatherei16_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4004 void Riscv64Assembler::VRgatherei16_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4005   AssertExtensionsEnabled(Riscv64Extension::kV);
4006   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4007   DCHECK(vd != vs1);
4008   DCHECK(vd != vs2);
4009   const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
4010   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4011 }
4012 
VSlidedown_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4013 void Riscv64Assembler::VSlidedown_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4014   AssertExtensionsEnabled(Riscv64Extension::kV);
4015   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4016   DCHECK(vd != vs2);
4017   const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
4018   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4019 }
4020 
VSlidedown_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4021 void Riscv64Assembler::VSlidedown_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4022   AssertExtensionsEnabled(Riscv64Extension::kV);
4023   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4024   const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
4025   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4026 }
4027 
VAdc_vvm(VRegister vd,VRegister vs2,VRegister vs1)4028 void Riscv64Assembler::VAdc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4029   AssertExtensionsEnabled(Riscv64Extension::kV);
4030   DCHECK(vd != V0);
4031   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kV0_t);
4032   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4033 }
4034 
VAdc_vxm(VRegister vd,VRegister vs2,XRegister rs1)4035 void Riscv64Assembler::VAdc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4036   AssertExtensionsEnabled(Riscv64Extension::kV);
4037   DCHECK(vd != V0);
4038   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kV0_t);
4039   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4040 }
4041 
VAdc_vim(VRegister vd,VRegister vs2,int32_t imm5)4042 void Riscv64Assembler::VAdc_vim(VRegister vd, VRegister vs2, int32_t imm5) {
4043   AssertExtensionsEnabled(Riscv64Extension::kV);
4044   DCHECK(vd != V0);
4045   const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kV0_t);
4046   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4047 }
4048 
VMadc_vvm(VRegister vd,VRegister vs2,VRegister vs1)4049 void Riscv64Assembler::VMadc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4050   AssertExtensionsEnabled(Riscv64Extension::kV);
4051   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kV0_t);
4052   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4053 }
4054 
VMadc_vxm(VRegister vd,VRegister vs2,XRegister rs1)4055 void Riscv64Assembler::VMadc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4056   AssertExtensionsEnabled(Riscv64Extension::kV);
4057   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kV0_t);
4058   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4059 }
4060 
VMadc_vim(VRegister vd,VRegister vs2,int32_t imm5)4061 void Riscv64Assembler::VMadc_vim(VRegister vd, VRegister vs2, int32_t imm5) {
4062   AssertExtensionsEnabled(Riscv64Extension::kV);
4063   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kV0_t);
4064   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4065 }
4066 
VMadc_vv(VRegister vd,VRegister vs2,VRegister vs1)4067 void Riscv64Assembler::VMadc_vv(VRegister vd, VRegister vs2, VRegister vs1) {
4068   AssertExtensionsEnabled(Riscv64Extension::kV);
4069   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kUnmasked);
4070   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4071 }
4072 
VMadc_vx(VRegister vd,VRegister vs2,XRegister rs1)4073 void Riscv64Assembler::VMadc_vx(VRegister vd, VRegister vs2, XRegister rs1) {
4074   AssertExtensionsEnabled(Riscv64Extension::kV);
4075   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kUnmasked);
4076   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4077 }
4078 
VMadc_vi(VRegister vd,VRegister vs2,int32_t imm5)4079 void Riscv64Assembler::VMadc_vi(VRegister vd, VRegister vs2, int32_t imm5) {
4080   AssertExtensionsEnabled(Riscv64Extension::kV);
4081   const uint32_t funct7 = EncodeRVVF7(0b010001, VM::kUnmasked);
4082   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4083 }
4084 
VSbc_vvm(VRegister vd,VRegister vs2,VRegister vs1)4085 void Riscv64Assembler::VSbc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4086   AssertExtensionsEnabled(Riscv64Extension::kV);
4087   DCHECK(vd != V0);
4088   const uint32_t funct7 = EncodeRVVF7(0b010010, VM::kV0_t);
4089   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4090 }
4091 
VSbc_vxm(VRegister vd,VRegister vs2,XRegister rs1)4092 void Riscv64Assembler::VSbc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4093   AssertExtensionsEnabled(Riscv64Extension::kV);
4094   DCHECK(vd != V0);
4095   const uint32_t funct7 = EncodeRVVF7(0b010010, VM::kV0_t);
4096   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4097 }
4098 
VMsbc_vvm(VRegister vd,VRegister vs2,VRegister vs1)4099 void Riscv64Assembler::VMsbc_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4100   AssertExtensionsEnabled(Riscv64Extension::kV);
4101   const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kV0_t);
4102   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4103 }
4104 
VMsbc_vxm(VRegister vd,VRegister vs2,XRegister rs1)4105 void Riscv64Assembler::VMsbc_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4106   AssertExtensionsEnabled(Riscv64Extension::kV);
4107   const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kV0_t);
4108   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4109 }
4110 
VMsbc_vv(VRegister vd,VRegister vs2,VRegister vs1)4111 void Riscv64Assembler::VMsbc_vv(VRegister vd, VRegister vs2, VRegister vs1) {
4112   AssertExtensionsEnabled(Riscv64Extension::kV);
4113   const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kUnmasked);
4114   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4115 }
4116 
VMsbc_vx(VRegister vd,VRegister vs2,XRegister rs1)4117 void Riscv64Assembler::VMsbc_vx(VRegister vd, VRegister vs2, XRegister rs1) {
4118   AssertExtensionsEnabled(Riscv64Extension::kV);
4119   const uint32_t funct7 = EncodeRVVF7(0b010011, VM::kUnmasked);
4120   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4121 }
4122 
VMerge_vvm(VRegister vd,VRegister vs2,VRegister vs1)4123 void Riscv64Assembler::VMerge_vvm(VRegister vd, VRegister vs2, VRegister vs1) {
4124   AssertExtensionsEnabled(Riscv64Extension::kV);
4125   DCHECK(vd != V0);
4126   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
4127   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4128 }
4129 
VMerge_vxm(VRegister vd,VRegister vs2,XRegister rs1)4130 void Riscv64Assembler::VMerge_vxm(VRegister vd, VRegister vs2, XRegister rs1) {
4131   AssertExtensionsEnabled(Riscv64Extension::kV);
4132   DCHECK(vd != V0);
4133   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
4134   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4135 }
4136 
VMerge_vim(VRegister vd,VRegister vs2,int32_t imm5)4137 void Riscv64Assembler::VMerge_vim(VRegister vd, VRegister vs2, int32_t imm5) {
4138   AssertExtensionsEnabled(Riscv64Extension::kV);
4139   DCHECK(vd != V0);
4140   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
4141   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4142 }
4143 
VMv_vv(VRegister vd,VRegister vs1)4144 void Riscv64Assembler::VMv_vv(VRegister vd, VRegister vs1) {
4145   AssertExtensionsEnabled(Riscv64Extension::kV);
4146   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
4147   EmitR(funct7, V0, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4148 }
4149 
VMv_vx(VRegister vd,XRegister rs1)4150 void Riscv64Assembler::VMv_vx(VRegister vd, XRegister rs1) {
4151   AssertExtensionsEnabled(Riscv64Extension::kV);
4152   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
4153   EmitR(funct7, V0, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4154 }
4155 
VMv_vi(VRegister vd,int32_t imm5)4156 void Riscv64Assembler::VMv_vi(VRegister vd, int32_t imm5) {
4157   AssertExtensionsEnabled(Riscv64Extension::kV);
4158   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
4159   EmitR(funct7, V0, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4160 }
4161 
VMseq_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4162 void Riscv64Assembler::VMseq_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4163   AssertExtensionsEnabled(Riscv64Extension::kV);
4164   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4165   const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
4166   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4167 }
4168 
VMseq_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4169 void Riscv64Assembler::VMseq_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4170   AssertExtensionsEnabled(Riscv64Extension::kV);
4171   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4172   const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
4173   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4174 }
4175 
VMseq_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4176 void Riscv64Assembler::VMseq_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4177   AssertExtensionsEnabled(Riscv64Extension::kV);
4178   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4179   const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
4180   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4181 }
4182 
VMsne_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4183 void Riscv64Assembler::VMsne_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4184   AssertExtensionsEnabled(Riscv64Extension::kV);
4185   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4186   const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
4187   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4188 }
4189 
VMsne_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4190 void Riscv64Assembler::VMsne_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4191   AssertExtensionsEnabled(Riscv64Extension::kV);
4192   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4193   const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
4194   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4195 }
4196 
VMsne_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4197 void Riscv64Assembler::VMsne_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4198   AssertExtensionsEnabled(Riscv64Extension::kV);
4199   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4200   const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
4201   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4202 }
4203 
VMsltu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4204 void Riscv64Assembler::VMsltu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4205   AssertExtensionsEnabled(Riscv64Extension::kV);
4206   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4207   const uint32_t funct7 = EncodeRVVF7(0b011010, vm);
4208   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4209 }
4210 
VMsltu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4211 void Riscv64Assembler::VMsltu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4212   AssertExtensionsEnabled(Riscv64Extension::kV);
4213   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4214   const uint32_t funct7 = EncodeRVVF7(0b011010, vm);
4215   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4216 }
4217 
VMsgtu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4218 void Riscv64Assembler::VMsgtu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4219   AssertExtensionsEnabled(Riscv64Extension::kV);
4220   VMsltu_vv(vd, vs1, vs2, vm);
4221 }
4222 
VMslt_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4223 void Riscv64Assembler::VMslt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4224   AssertExtensionsEnabled(Riscv64Extension::kV);
4225   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4226   const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
4227   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4228 }
4229 
VMslt_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4230 void Riscv64Assembler::VMslt_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4231   AssertExtensionsEnabled(Riscv64Extension::kV);
4232   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4233   const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
4234   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4235 }
4236 
VMsgt_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4237 void Riscv64Assembler::VMsgt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4238   VMslt_vv(vd, vs1, vs2, vm);
4239 }
4240 
VMsleu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4241 void Riscv64Assembler::VMsleu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4242   AssertExtensionsEnabled(Riscv64Extension::kV);
4243   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4244   const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
4245   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4246 }
4247 
VMsleu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4248 void Riscv64Assembler::VMsleu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4249   AssertExtensionsEnabled(Riscv64Extension::kV);
4250   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4251   const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
4252   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4253 }
4254 
VMsleu_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4255 void Riscv64Assembler::VMsleu_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4256   AssertExtensionsEnabled(Riscv64Extension::kV);
4257   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4258   const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
4259   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4260 }
4261 
VMsgeu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4262 void Riscv64Assembler::VMsgeu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4263   VMsleu_vv(vd, vs1, vs2, vm);
4264 }
4265 
VMsltu_vi(VRegister vd,VRegister vs2,int32_t aimm5,VM vm)4266 void Riscv64Assembler::VMsltu_vi(VRegister vd, VRegister vs2, int32_t aimm5, VM vm) {
4267   CHECK(IsUint<4>(aimm5 - 1)) << "Should be between [1, 16]" << aimm5;
4268   VMsleu_vi(vd, vs2, aimm5 - 1, vm);
4269 }
4270 
VMsle_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4271 void Riscv64Assembler::VMsle_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4272   AssertExtensionsEnabled(Riscv64Extension::kV);
4273   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4274   const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
4275   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4276 }
4277 
VMsle_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4278 void Riscv64Assembler::VMsle_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4279   AssertExtensionsEnabled(Riscv64Extension::kV);
4280   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4281   const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
4282   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4283 }
4284 
VMsle_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4285 void Riscv64Assembler::VMsle_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4286   AssertExtensionsEnabled(Riscv64Extension::kV);
4287   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4288   const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
4289   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4290 }
4291 
VMsge_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4292 void Riscv64Assembler::VMsge_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4293   VMsle_vv(vd, vs1, vs2, vm);
4294 }
4295 
VMslt_vi(VRegister vd,VRegister vs2,int32_t aimm5,VM vm)4296 void Riscv64Assembler::VMslt_vi(VRegister vd, VRegister vs2, int32_t aimm5, VM vm) {
4297   VMsle_vi(vd, vs2, aimm5 - 1, vm);
4298 }
4299 
VMsgtu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4300 void Riscv64Assembler::VMsgtu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4301   AssertExtensionsEnabled(Riscv64Extension::kV);
4302   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4303   const uint32_t funct7 = EncodeRVVF7(0b011110, vm);
4304   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4305 }
4306 
VMsgtu_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4307 void Riscv64Assembler::VMsgtu_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4308   AssertExtensionsEnabled(Riscv64Extension::kV);
4309   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4310   const uint32_t funct7 = EncodeRVVF7(0b011110, vm);
4311   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4312 }
4313 
VMsgeu_vi(VRegister vd,VRegister vs2,int32_t aimm5,VM vm)4314 void Riscv64Assembler::VMsgeu_vi(VRegister vd, VRegister vs2, int32_t aimm5, VM vm) {
4315   AssertExtensionsEnabled(Riscv64Extension::kV);
4316   CHECK(IsUint<4>(aimm5 - 1)) << "Should be between [1, 16]" << aimm5;
4317   VMsgtu_vi(vd, vs2, aimm5 - 1, vm);
4318 }
4319 
VMsgt_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4320 void Riscv64Assembler::VMsgt_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4321   AssertExtensionsEnabled(Riscv64Extension::kV);
4322   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4323   const uint32_t funct7 = EncodeRVVF7(0b011111, vm);
4324   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4325 }
4326 
VMsgt_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4327 void Riscv64Assembler::VMsgt_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4328   AssertExtensionsEnabled(Riscv64Extension::kV);
4329   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4330   const uint32_t funct7 = EncodeRVVF7(0b011111, vm);
4331   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4332 }
4333 
VMsge_vi(VRegister vd,VRegister vs2,int32_t aimm5,VM vm)4334 void Riscv64Assembler::VMsge_vi(VRegister vd, VRegister vs2, int32_t aimm5, VM vm) {
4335   VMsgt_vi(vd, vs2, aimm5 - 1, vm);
4336 }
4337 
VSaddu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4338 void Riscv64Assembler::VSaddu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4339   AssertExtensionsEnabled(Riscv64Extension::kV);
4340   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4341   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
4342   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4343 }
4344 
VSaddu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4345 void Riscv64Assembler::VSaddu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4346   AssertExtensionsEnabled(Riscv64Extension::kV);
4347   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4348   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
4349   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4350 }
4351 
VSaddu_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4352 void Riscv64Assembler::VSaddu_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4353   AssertExtensionsEnabled(Riscv64Extension::kV);
4354   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4355   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
4356   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4357 }
4358 
VSadd_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4359 void Riscv64Assembler::VSadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4360   AssertExtensionsEnabled(Riscv64Extension::kV);
4361   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4362   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
4363   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4364 }
4365 
VSadd_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4366 void Riscv64Assembler::VSadd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4367   AssertExtensionsEnabled(Riscv64Extension::kV);
4368   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4369   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
4370   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4371 }
4372 
VSadd_vi(VRegister vd,VRegister vs2,int32_t imm5,VM vm)4373 void Riscv64Assembler::VSadd_vi(VRegister vd, VRegister vs2, int32_t imm5, VM vm) {
4374   AssertExtensionsEnabled(Riscv64Extension::kV);
4375   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4376   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
4377   EmitR(funct7, vs2, EncodeInt5(imm5), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4378 }
4379 
VSsubu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4380 void Riscv64Assembler::VSsubu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4381   AssertExtensionsEnabled(Riscv64Extension::kV);
4382   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4383   const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
4384   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4385 }
4386 
VSsubu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4387 void Riscv64Assembler::VSsubu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4388   AssertExtensionsEnabled(Riscv64Extension::kV);
4389   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4390   const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
4391   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4392 }
4393 
VSsub_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4394 void Riscv64Assembler::VSsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4395   AssertExtensionsEnabled(Riscv64Extension::kV);
4396   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4397   const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
4398   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4399 }
4400 
VSsub_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4401 void Riscv64Assembler::VSsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4402   AssertExtensionsEnabled(Riscv64Extension::kV);
4403   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4404   const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
4405   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4406 }
4407 
VSll_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4408 void Riscv64Assembler::VSll_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4409   AssertExtensionsEnabled(Riscv64Extension::kV);
4410   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4411   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
4412   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4413 }
4414 
VSll_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4415 void Riscv64Assembler::VSll_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4416   AssertExtensionsEnabled(Riscv64Extension::kV);
4417   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4418   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
4419   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4420 }
4421 
VSll_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4422 void Riscv64Assembler::VSll_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4423   AssertExtensionsEnabled(Riscv64Extension::kV);
4424   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4425   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
4426   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4427 }
4428 
VSmul_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4429 void Riscv64Assembler::VSmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4430   AssertExtensionsEnabled(Riscv64Extension::kV);
4431   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4432   const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
4433   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4434 }
4435 
VSmul_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4436 void Riscv64Assembler::VSmul_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4437   AssertExtensionsEnabled(Riscv64Extension::kV);
4438   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4439   const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
4440   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4441 }
4442 
Vmv1r_v(VRegister vd,VRegister vs2)4443 void Riscv64Assembler::Vmv1r_v(VRegister vd, VRegister vs2) {
4444   AssertExtensionsEnabled(Riscv64Extension::kV);
4445   const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
4446   EmitR(
4447       funct7, vs2, enum_cast<uint32_t>(Nf::k1), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4448 }
4449 
Vmv2r_v(VRegister vd,VRegister vs2)4450 void Riscv64Assembler::Vmv2r_v(VRegister vd, VRegister vs2) {
4451   AssertExtensionsEnabled(Riscv64Extension::kV);
4452   DCHECK_EQ(enum_cast<uint32_t>(vd) % 2, 0u);
4453   DCHECK_EQ(enum_cast<uint32_t>(vs2) % 2, 0u);
4454   const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
4455   EmitR(
4456       funct7, vs2, enum_cast<uint32_t>(Nf::k2), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4457 }
4458 
Vmv4r_v(VRegister vd,VRegister vs2)4459 void Riscv64Assembler::Vmv4r_v(VRegister vd, VRegister vs2) {
4460   AssertExtensionsEnabled(Riscv64Extension::kV);
4461   DCHECK_EQ(enum_cast<uint32_t>(vd) % 4, 0u);
4462   DCHECK_EQ(enum_cast<uint32_t>(vs2) % 4, 0u);
4463   const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
4464   EmitR(
4465       funct7, vs2, enum_cast<uint32_t>(Nf::k4), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4466 }
4467 
Vmv8r_v(VRegister vd,VRegister vs2)4468 void Riscv64Assembler::Vmv8r_v(VRegister vd, VRegister vs2) {
4469   AssertExtensionsEnabled(Riscv64Extension::kV);
4470   DCHECK_EQ(enum_cast<uint32_t>(vd) % 8, 0u);
4471   DCHECK_EQ(enum_cast<uint32_t>(vs2) % 8, 0u);
4472   const uint32_t funct7 = EncodeRVVF7(0b100111, VM::kUnmasked);
4473   EmitR(
4474       funct7, vs2, enum_cast<uint32_t>(Nf::k8), enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4475 }
4476 
VSrl_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4477 void Riscv64Assembler::VSrl_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4478   AssertExtensionsEnabled(Riscv64Extension::kV);
4479   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4480   const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
4481   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4482 }
4483 
VSrl_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4484 void Riscv64Assembler::VSrl_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4485   AssertExtensionsEnabled(Riscv64Extension::kV);
4486   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4487   const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
4488   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4489 }
4490 
VSrl_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4491 void Riscv64Assembler::VSrl_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4492   AssertExtensionsEnabled(Riscv64Extension::kV);
4493   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4494   const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
4495   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4496 }
4497 
VSra_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4498 void Riscv64Assembler::VSra_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4499   AssertExtensionsEnabled(Riscv64Extension::kV);
4500   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4501   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
4502   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4503 }
4504 
VSra_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4505 void Riscv64Assembler::VSra_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4506   AssertExtensionsEnabled(Riscv64Extension::kV);
4507   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4508   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
4509   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4510 }
4511 
VSra_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4512 void Riscv64Assembler::VSra_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4513   AssertExtensionsEnabled(Riscv64Extension::kV);
4514   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4515   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
4516   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4517 }
4518 
VSsrl_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4519 void Riscv64Assembler::VSsrl_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4520   AssertExtensionsEnabled(Riscv64Extension::kV);
4521   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4522   const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
4523   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4524 }
4525 
VSsrl_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4526 void Riscv64Assembler::VSsrl_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4527   AssertExtensionsEnabled(Riscv64Extension::kV);
4528   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4529   const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
4530   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4531 }
4532 
VSsrl_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4533 void Riscv64Assembler::VSsrl_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4534   AssertExtensionsEnabled(Riscv64Extension::kV);
4535   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4536   const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
4537   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4538 }
4539 
VSsra_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4540 void Riscv64Assembler::VSsra_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4541   AssertExtensionsEnabled(Riscv64Extension::kV);
4542   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4543   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
4544   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4545 }
4546 
VSsra_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4547 void Riscv64Assembler::VSsra_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4548   AssertExtensionsEnabled(Riscv64Extension::kV);
4549   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4550   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
4551   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4552 }
4553 
VSsra_vi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4554 void Riscv64Assembler::VSsra_vi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4555   AssertExtensionsEnabled(Riscv64Extension::kV);
4556   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4557   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
4558   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4559 }
4560 
VNsrl_wv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4561 void Riscv64Assembler::VNsrl_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4562   AssertExtensionsEnabled(Riscv64Extension::kV);
4563   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4564   const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
4565   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4566 }
4567 
VNsrl_wx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4568 void Riscv64Assembler::VNsrl_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4569   AssertExtensionsEnabled(Riscv64Extension::kV);
4570   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4571   const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
4572   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4573 }
4574 
VNsrl_wi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4575 void Riscv64Assembler::VNsrl_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4576   AssertExtensionsEnabled(Riscv64Extension::kV);
4577   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4578   const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
4579   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4580 }
4581 
VNcvt_x_x_w(VRegister vd,VRegister vs2,VM vm)4582 void Riscv64Assembler::VNcvt_x_x_w(VRegister vd, VRegister vs2, VM vm) {
4583   AssertExtensionsEnabled(Riscv64Extension::kV);
4584   VNsrl_wx(vd, vs2, Zero, vm);
4585 }
4586 
VNsra_wv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4587 void Riscv64Assembler::VNsra_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4588   AssertExtensionsEnabled(Riscv64Extension::kV);
4589   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4590   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
4591   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4592 }
4593 
VNsra_wx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4594 void Riscv64Assembler::VNsra_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4595   AssertExtensionsEnabled(Riscv64Extension::kV);
4596   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4597   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
4598   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4599 }
4600 
VNsra_wi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4601 void Riscv64Assembler::VNsra_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4602   AssertExtensionsEnabled(Riscv64Extension::kV);
4603   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4604   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
4605   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4606 }
4607 
VNclipu_wv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4608 void Riscv64Assembler::VNclipu_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4609   AssertExtensionsEnabled(Riscv64Extension::kV);
4610   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4611   const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
4612   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4613 }
4614 
VNclipu_wx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4615 void Riscv64Assembler::VNclipu_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4616   AssertExtensionsEnabled(Riscv64Extension::kV);
4617   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4618   const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
4619   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4620 }
4621 
VNclipu_wi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4622 void Riscv64Assembler::VNclipu_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4623   AssertExtensionsEnabled(Riscv64Extension::kV);
4624   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4625   const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
4626   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4627 }
4628 
VNclip_wv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4629 void Riscv64Assembler::VNclip_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4630   AssertExtensionsEnabled(Riscv64Extension::kV);
4631   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4632   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
4633   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4634 }
4635 
VNclip_wx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4636 void Riscv64Assembler::VNclip_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4637   AssertExtensionsEnabled(Riscv64Extension::kV);
4638   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4639   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
4640   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPIVX), vd, 0x57);
4641 }
4642 
VNclip_wi(VRegister vd,VRegister vs2,uint32_t uimm5,VM vm)4643 void Riscv64Assembler::VNclip_wi(VRegister vd, VRegister vs2, uint32_t uimm5, VM vm) {
4644   AssertExtensionsEnabled(Riscv64Extension::kV);
4645   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4646   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
4647   EmitR(funct7, vs2, uimm5, enum_cast<uint32_t>(VAIEncoding::kOPIVI), vd, 0x57);
4648 }
4649 
VWredsumu_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4650 void Riscv64Assembler::VWredsumu_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4651   AssertExtensionsEnabled(Riscv64Extension::kV);
4652   const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
4653   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4654 }
4655 
VWredsum_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4656 void Riscv64Assembler::VWredsum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4657   AssertExtensionsEnabled(Riscv64Extension::kV);
4658   const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
4659   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPIVV), vd, 0x57);
4660 }
4661 
VRedsum_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4662 void Riscv64Assembler::VRedsum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4663   AssertExtensionsEnabled(Riscv64Extension::kV);
4664   const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
4665   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4666 }
4667 
VRedand_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4668 void Riscv64Assembler::VRedand_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4669   AssertExtensionsEnabled(Riscv64Extension::kV);
4670   const uint32_t funct7 = EncodeRVVF7(0b000001, vm);
4671   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4672 }
4673 
VRedor_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4674 void Riscv64Assembler::VRedor_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4675   AssertExtensionsEnabled(Riscv64Extension::kV);
4676   const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
4677   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4678 }
4679 
VRedxor_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4680 void Riscv64Assembler::VRedxor_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4681   AssertExtensionsEnabled(Riscv64Extension::kV);
4682   const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
4683   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4684 }
4685 
VRedminu_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4686 void Riscv64Assembler::VRedminu_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4687   AssertExtensionsEnabled(Riscv64Extension::kV);
4688   const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
4689   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4690 }
4691 
VRedmin_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4692 void Riscv64Assembler::VRedmin_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4693   AssertExtensionsEnabled(Riscv64Extension::kV);
4694   const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
4695   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4696 }
4697 
VRedmaxu_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4698 void Riscv64Assembler::VRedmaxu_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4699   AssertExtensionsEnabled(Riscv64Extension::kV);
4700   const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
4701   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4702 }
4703 
VRedmax_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4704 void Riscv64Assembler::VRedmax_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4705   AssertExtensionsEnabled(Riscv64Extension::kV);
4706   const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
4707   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4708 }
4709 
VAaddu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4710 void Riscv64Assembler::VAaddu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4711   AssertExtensionsEnabled(Riscv64Extension::kV);
4712   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4713   const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
4714   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4715 }
4716 
VAaddu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4717 void Riscv64Assembler::VAaddu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4718   AssertExtensionsEnabled(Riscv64Extension::kV);
4719   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4720   const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
4721   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4722 }
4723 
VAadd_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4724 void Riscv64Assembler::VAadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4725   AssertExtensionsEnabled(Riscv64Extension::kV);
4726   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4727   const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
4728   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4729 }
4730 
VAadd_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4731 void Riscv64Assembler::VAadd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4732   AssertExtensionsEnabled(Riscv64Extension::kV);
4733   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4734   const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
4735   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4736 }
4737 
VAsubu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4738 void Riscv64Assembler::VAsubu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4739   AssertExtensionsEnabled(Riscv64Extension::kV);
4740   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4741   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
4742   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4743 }
4744 
VAsubu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4745 void Riscv64Assembler::VAsubu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4746   AssertExtensionsEnabled(Riscv64Extension::kV);
4747   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4748   const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
4749   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4750 }
4751 
VAsub_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4752 void Riscv64Assembler::VAsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4753   AssertExtensionsEnabled(Riscv64Extension::kV);
4754   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4755   const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
4756   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4757 }
4758 
VAsub_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4759 void Riscv64Assembler::VAsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4760   AssertExtensionsEnabled(Riscv64Extension::kV);
4761   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4762   const uint32_t funct7 = EncodeRVVF7(0b001011, vm);
4763   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4764 }
4765 
VSlide1up_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4766 void Riscv64Assembler::VSlide1up_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4767   AssertExtensionsEnabled(Riscv64Extension::kV);
4768   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4769   DCHECK(vd != vs2);
4770   const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
4771   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4772 }
4773 
VSlide1down_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4774 void Riscv64Assembler::VSlide1down_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4775   AssertExtensionsEnabled(Riscv64Extension::kV);
4776   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4777   const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
4778   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4779 }
4780 
VCompress_vm(VRegister vd,VRegister vs2,VRegister vs1)4781 void Riscv64Assembler::VCompress_vm(VRegister vd, VRegister vs2, VRegister vs1) {
4782   AssertExtensionsEnabled(Riscv64Extension::kV);
4783   DCHECK(vd != vs1);
4784   DCHECK(vd != vs2);
4785   const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
4786   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4787 }
4788 
VMandn_mm(VRegister vd,VRegister vs2,VRegister vs1)4789 void Riscv64Assembler::VMandn_mm(VRegister vd, VRegister vs2, VRegister vs1) {
4790   AssertExtensionsEnabled(Riscv64Extension::kV);
4791   const uint32_t funct7 = EncodeRVVF7(0b011000, VM::kUnmasked);
4792   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4793 }
4794 
VMand_mm(VRegister vd,VRegister vs2,VRegister vs1)4795 void Riscv64Assembler::VMand_mm(VRegister vd, VRegister vs2, VRegister vs1) {
4796   AssertExtensionsEnabled(Riscv64Extension::kV);
4797   const uint32_t funct7 = EncodeRVVF7(0b011001, VM::kUnmasked);
4798   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4799 }
4800 
VMmv_m(VRegister vd,VRegister vs2)4801 void Riscv64Assembler::VMmv_m(VRegister vd, VRegister vs2) { VMand_mm(vd, vs2, vs2); }
4802 
VMor_mm(VRegister vd,VRegister vs2,VRegister vs1)4803 void Riscv64Assembler::VMor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
4804   AssertExtensionsEnabled(Riscv64Extension::kV);
4805   const uint32_t funct7 = EncodeRVVF7(0b011010, VM::kUnmasked);
4806   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4807 }
4808 
VMxor_mm(VRegister vd,VRegister vs2,VRegister vs1)4809 void Riscv64Assembler::VMxor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
4810   AssertExtensionsEnabled(Riscv64Extension::kV);
4811   const uint32_t funct7 = EncodeRVVF7(0b011011, VM::kUnmasked);
4812   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4813 }
4814 
VMclr_m(VRegister vd)4815 void Riscv64Assembler::VMclr_m(VRegister vd) { VMxor_mm(vd, vd, vd); }
4816 
VMorn_mm(VRegister vd,VRegister vs2,VRegister vs1)4817 void Riscv64Assembler::VMorn_mm(VRegister vd, VRegister vs2, VRegister vs1) {
4818   AssertExtensionsEnabled(Riscv64Extension::kV);
4819   const uint32_t funct7 = EncodeRVVF7(0b011100, VM::kUnmasked);
4820   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4821 }
4822 
VMnand_mm(VRegister vd,VRegister vs2,VRegister vs1)4823 void Riscv64Assembler::VMnand_mm(VRegister vd, VRegister vs2, VRegister vs1) {
4824   AssertExtensionsEnabled(Riscv64Extension::kV);
4825   const uint32_t funct7 = EncodeRVVF7(0b011101, VM::kUnmasked);
4826   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4827 }
4828 
VMnot_m(VRegister vd,VRegister vs2)4829 void Riscv64Assembler::VMnot_m(VRegister vd, VRegister vs2) { VMnand_mm(vd, vs2, vs2); }
4830 
VMnor_mm(VRegister vd,VRegister vs2,VRegister vs1)4831 void Riscv64Assembler::VMnor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
4832   AssertExtensionsEnabled(Riscv64Extension::kV);
4833   const uint32_t funct7 = EncodeRVVF7(0b011110, VM::kUnmasked);
4834   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4835 }
4836 
VMxnor_mm(VRegister vd,VRegister vs2,VRegister vs1)4837 void Riscv64Assembler::VMxnor_mm(VRegister vd, VRegister vs2, VRegister vs1) {
4838   AssertExtensionsEnabled(Riscv64Extension::kV);
4839   const uint32_t funct7 = EncodeRVVF7(0b011111, VM::kUnmasked);
4840   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4841 }
4842 
VMset_m(VRegister vd)4843 void Riscv64Assembler::VMset_m(VRegister vd) { VMxnor_mm(vd, vd, vd); }
4844 
VDivu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4845 void Riscv64Assembler::VDivu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4846   AssertExtensionsEnabled(Riscv64Extension::kV);
4847   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4848   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
4849   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4850 }
4851 
VDivu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4852 void Riscv64Assembler::VDivu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4853   AssertExtensionsEnabled(Riscv64Extension::kV);
4854   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4855   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
4856   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4857 }
4858 
VDiv_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4859 void Riscv64Assembler::VDiv_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4860   AssertExtensionsEnabled(Riscv64Extension::kV);
4861   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4862   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
4863   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4864 }
4865 
VDiv_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4866 void Riscv64Assembler::VDiv_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4867   AssertExtensionsEnabled(Riscv64Extension::kV);
4868   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4869   const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
4870   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4871 }
4872 
VRemu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4873 void Riscv64Assembler::VRemu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4874   AssertExtensionsEnabled(Riscv64Extension::kV);
4875   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4876   const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
4877   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4878 }
4879 
VRemu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4880 void Riscv64Assembler::VRemu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4881   AssertExtensionsEnabled(Riscv64Extension::kV);
4882   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4883   const uint32_t funct7 = EncodeRVVF7(0b100010, vm);
4884   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4885 }
4886 
VRem_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4887 void Riscv64Assembler::VRem_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4888   AssertExtensionsEnabled(Riscv64Extension::kV);
4889   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4890   const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
4891   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4892 }
4893 
VRem_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4894 void Riscv64Assembler::VRem_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4895   AssertExtensionsEnabled(Riscv64Extension::kV);
4896   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4897   const uint32_t funct7 = EncodeRVVF7(0b100011, vm);
4898   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4899 }
4900 
VMulhu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4901 void Riscv64Assembler::VMulhu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4902   AssertExtensionsEnabled(Riscv64Extension::kV);
4903   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4904   const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
4905   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4906 }
4907 
VMulhu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4908 void Riscv64Assembler::VMulhu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4909   AssertExtensionsEnabled(Riscv64Extension::kV);
4910   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4911   const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
4912   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4913 }
4914 
VMul_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4915 void Riscv64Assembler::VMul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4916   AssertExtensionsEnabled(Riscv64Extension::kV);
4917   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4918   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
4919   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4920 }
4921 
VMul_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4922 void Riscv64Assembler::VMul_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4923   AssertExtensionsEnabled(Riscv64Extension::kV);
4924   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4925   const uint32_t funct7 = EncodeRVVF7(0b100101, vm);
4926   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4927 }
4928 
VMulhsu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4929 void Riscv64Assembler::VMulhsu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4930   AssertExtensionsEnabled(Riscv64Extension::kV);
4931   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4932   const uint32_t funct7 = EncodeRVVF7(0b100110, vm);
4933   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4934 }
4935 
VMulhsu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4936 void Riscv64Assembler::VMulhsu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4937   AssertExtensionsEnabled(Riscv64Extension::kV);
4938   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4939   const uint32_t funct7 = EncodeRVVF7(0b100110, vm);
4940   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4941 }
4942 
VMulh_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)4943 void Riscv64Assembler::VMulh_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
4944   AssertExtensionsEnabled(Riscv64Extension::kV);
4945   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4946   const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
4947   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4948 }
4949 
VMulh_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)4950 void Riscv64Assembler::VMulh_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
4951   AssertExtensionsEnabled(Riscv64Extension::kV);
4952   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4953   const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
4954   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4955 }
4956 
VMadd_vv(VRegister vd,VRegister vs1,VRegister vs2,VM vm)4957 void Riscv64Assembler::VMadd_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
4958   AssertExtensionsEnabled(Riscv64Extension::kV);
4959   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4960   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
4961   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4962 }
4963 
VMadd_vx(VRegister vd,XRegister rs1,VRegister vs2,VM vm)4964 void Riscv64Assembler::VMadd_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
4965   AssertExtensionsEnabled(Riscv64Extension::kV);
4966   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4967   const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
4968   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4969 }
4970 
VNmsub_vv(VRegister vd,VRegister vs1,VRegister vs2,VM vm)4971 void Riscv64Assembler::VNmsub_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
4972   AssertExtensionsEnabled(Riscv64Extension::kV);
4973   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4974   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
4975   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4976 }
4977 
VNmsub_vx(VRegister vd,XRegister rs1,VRegister vs2,VM vm)4978 void Riscv64Assembler::VNmsub_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
4979   AssertExtensionsEnabled(Riscv64Extension::kV);
4980   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4981   const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
4982   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4983 }
4984 
VMacc_vv(VRegister vd,VRegister vs1,VRegister vs2,VM vm)4985 void Riscv64Assembler::VMacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
4986   AssertExtensionsEnabled(Riscv64Extension::kV);
4987   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4988   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
4989   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
4990 }
4991 
VMacc_vx(VRegister vd,XRegister rs1,VRegister vs2,VM vm)4992 void Riscv64Assembler::VMacc_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
4993   AssertExtensionsEnabled(Riscv64Extension::kV);
4994   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
4995   const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
4996   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
4997 }
4998 
// Emits vnmsac.vv (funct6 = 0b101111, OPMVV category, opcode 0x57).
void Riscv64Assembler::VNmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked operation must not overwrite the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // NOTE(review): these overlap checks are stricter than the sibling multiply-add
  // emitters — VMacc_vv, VMadd_vv, VNmsub_vv and VNmsac_vx have no vd != vs1/vs2
  // checks. Confirm whether the asymmetry is intentional.
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5007 
VNmsac_vx(VRegister vd,XRegister rs1,VRegister vs2,VM vm)5008 void Riscv64Assembler::VNmsac_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
5009   AssertExtensionsEnabled(Riscv64Extension::kV);
5010   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5011   const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
5012   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
5013 }
5014 
VWaddu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5015 void Riscv64Assembler::VWaddu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5016   AssertExtensionsEnabled(Riscv64Extension::kV);
5017   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5018   DCHECK(vd != vs1);
5019   DCHECK(vd != vs2);
5020   const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
5021   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5022 }
5023 
VWaddu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)5024 void Riscv64Assembler::VWaddu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
5025   AssertExtensionsEnabled(Riscv64Extension::kV);
5026   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5027   DCHECK(vd != vs2);
5028   const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
5029   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
5030 }
5031 
VWcvtu_x_x_v(VRegister vd,VRegister vs,VM vm)5032 void Riscv64Assembler::VWcvtu_x_x_v(VRegister vd, VRegister vs, VM vm) {
5033   VWaddu_vx(vd, vs, Zero, vm);
5034 }
5035 
VWadd_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5036 void Riscv64Assembler::VWadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5037   AssertExtensionsEnabled(Riscv64Extension::kV);
5038   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5039   DCHECK(vd != vs1);
5040   DCHECK(vd != vs2);
5041   const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
5042   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5043 }
5044 
VWadd_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)5045 void Riscv64Assembler::VWadd_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
5046   AssertExtensionsEnabled(Riscv64Extension::kV);
5047   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5048   DCHECK(vd != vs2);
5049   const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
5050   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
5051 }
5052 
VWcvt_x_x_v(VRegister vd,VRegister vs,VM vm)5053 void Riscv64Assembler::VWcvt_x_x_v(VRegister vd, VRegister vs, VM vm) {
5054   VWadd_vx(vd, vs, Zero, vm);
5055 }
5056 
VWsubu_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5057 void Riscv64Assembler::VWsubu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5058   AssertExtensionsEnabled(Riscv64Extension::kV);
5059   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5060   DCHECK(vd != vs1);
5061   DCHECK(vd != vs2);
5062   const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
5063   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5064 }
5065 
VWsubu_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)5066 void Riscv64Assembler::VWsubu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
5067   AssertExtensionsEnabled(Riscv64Extension::kV);
5068   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5069   DCHECK(vd != vs2);
5070   const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
5071   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
5072 }
5073 
VWsub_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5074 void Riscv64Assembler::VWsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5075   AssertExtensionsEnabled(Riscv64Extension::kV);
5076   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5077   DCHECK(vd != vs1);
5078   DCHECK(vd != vs2);
5079   const uint32_t funct7 = EncodeRVVF7(0b110011, vm);
5080   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
5081 }
5082 
VWsub_vx(VRegister vd,VRegister vs2,XRegister rs1,VM vm)5083 void Riscv64Assembler::VWsub_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
5084   AssertExtensionsEnabled(Riscv64Extension::kV);
5085   DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
5086   DCHECK(vd != vs2);
5087   const uint32_t funct7 = EncodeRVVF7(0b110011, vm);
5088   EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
5089 }
5090 
// vwaddu.wv: widening unsigned add with wide (2*SEW) vs2 and narrow (SEW) vs1.
void Riscv64Assembler::VWaddu_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Only the narrow source vs1 must not overlap vd; vs2 is already wide.
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5098 
// vwaddu.wx: widening unsigned add of scalar rs1 to wide (2*SEW) vector vs2.
void Riscv64Assembler::VWaddu_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0. No overlap check is
  // needed: vs2 is already wide and rs1 is a scalar.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5105 
// vwadd.wv: widening signed add with wide (2*SEW) vs2 and narrow (SEW) vs1.
void Riscv64Assembler::VWadd_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Only the narrow source vs1 must not overlap vd; vs2 is already wide.
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5113 
// vwadd.wx: widening signed add of scalar rs1 to wide (2*SEW) vector vs2.
void Riscv64Assembler::VWadd_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0. No overlap check is
  // needed: vs2 is already wide and rs1 is a scalar.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110101, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5120 
// vwsubu.wv: widening unsigned subtract with wide (2*SEW) vs2 and narrow (SEW) vs1.
void Riscv64Assembler::VWsubu_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Only the narrow source vs1 must not overlap vd; vs2 is already wide.
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5128 
// vwsubu.wx: widening unsigned subtract of scalar rs1 from wide (2*SEW) vector vs2.
void Riscv64Assembler::VWsubu_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0. No overlap check is
  // needed: vs2 is already wide and rs1 is a scalar.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5135 
// vwsub.wv: widening signed subtract with wide (2*SEW) vs2 and narrow (SEW) vs1.
void Riscv64Assembler::VWsub_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Only the narrow source vs1 must not overlap vd; vs2 is already wide.
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5143 
// vwsub.wx: widening signed subtract of scalar rs1 from wide (2*SEW) vector vs2.
void Riscv64Assembler::VWsub_wx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0. No overlap check is
  // needed: vs2 is already wide and rs1 is a scalar.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110111, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5150 
// vwmulu.vv: widening unsigned multiply; 2*SEW product of SEW sources.
void Riscv64Assembler::VWmulu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Narrow sources must not overlap the widened destination group.
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5159 
// vwmulu.vx: widening unsigned multiply of vector vs2 by scalar rs1.
void Riscv64Assembler::VWmulu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // The narrow source must not overlap the widened destination group.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5167 
// vwmulsu.vv: widening multiply, signed vs2 times unsigned vs1; 2*SEW product.
void Riscv64Assembler::VWmulsu_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Narrow sources must not overlap the widened destination group.
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5176 
// vwmulsu.vx: widening multiply, signed vector vs2 times unsigned scalar rs1.
void Riscv64Assembler::VWmulsu_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // The narrow source must not overlap the widened destination group.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111010, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5184 
// vwmul.vv: widening signed multiply; 2*SEW product of SEW sources.
void Riscv64Assembler::VWmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Narrow sources must not overlap the widened destination group.
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5193 
// vwmul.vx: widening signed multiply of vector vs2 by scalar rs1.
void Riscv64Assembler::VWmul_vx(VRegister vd, VRegister vs2, XRegister rs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // The narrow source must not overlap the widened destination group.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111011, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5201 
// vwmaccu.vv: widening unsigned multiply-accumulate into the wide destination.
// Note the multiplicands come first in the parameter list (vd, vs1, vs2),
// matching the assembly operand order of the macc family.
void Riscv64Assembler::VWmaccu_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Narrow sources must not overlap the widened destination group.
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5210 
// vwmaccu.vx: widening unsigned multiply-accumulate with scalar multiplicand rs1.
void Riscv64Assembler::VWmaccu_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // The narrow source must not overlap the widened destination group.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5218 
// vwmacc.vv: widening signed multiply-accumulate into the wide destination.
void Riscv64Assembler::VWmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Narrow sources must not overlap the widened destination group.
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5227 
// vwmacc.vx: widening signed multiply-accumulate with scalar multiplicand rs1.
void Riscv64Assembler::VWmacc_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // The narrow source must not overlap the widened destination group.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5235 
// vwmaccus.vx: widening multiply-accumulate, unsigned scalar rs1 times signed
// vector vs2. This instruction exists only in the .vx form in the RVV spec.
void Riscv64Assembler::VWmaccus_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // The narrow source must not overlap the widened destination group.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111110, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5243 
// vwmaccsu.vv: widening multiply-accumulate, signed vs1 times unsigned vs2.
void Riscv64Assembler::VWmaccsu_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Narrow sources must not overlap the widened destination group.
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5252 
// vwmaccsu.vx: widening multiply-accumulate, signed scalar rs1 times unsigned vs2.
void Riscv64Assembler::VWmaccsu_vx(VRegister vd, XRegister rs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // The narrow source must not overlap the widened destination group.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
  EmitR(funct7, vs2, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5260 
// vfadd.vv: floating-point element-wise add, vd = vs2 + vs1.
void Riscv64Assembler::VFadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5267 
// vfadd.vf: floating-point add of scalar fs1 to every element of vs2.
void Riscv64Assembler::VFadd_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5274 
// vfredusum.vs: unordered floating-point sum reduction; vd[0] = sum(vs2[*]) + vs1[0].
// No `vd != V0` check: a reduction's destination may overlap any source operand,
// including the mask register, per the RVV spec.
void Riscv64Assembler::VFredusum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5280 
// vfsub.vv: floating-point element-wise subtract, vd = vs2 - vs1.
void Riscv64Assembler::VFsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5287 
// vfsub.vf: floating-point subtract of scalar fs1 from every element of vs2.
void Riscv64Assembler::VFsub_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000010, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5294 
// vfredosum.vs: ordered floating-point sum reduction (sequential element order).
// No `vd != V0` check: reduction destinations may overlap the mask register.
void Riscv64Assembler::VFredosum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5300 
// vfmin.vv: floating-point element-wise minimum of vs2 and vs1.
void Riscv64Assembler::VFmin_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5307 
// vfmin.vf: floating-point minimum of each element of vs2 and scalar fs1.
void Riscv64Assembler::VFmin_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5314 
// vfredmin.vs: floating-point minimum reduction; vd[0] = min(vs1[0], vs2[*]).
// No `vd != V0` check: reduction destinations may overlap the mask register.
void Riscv64Assembler::VFredmin_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5320 
// vfmax.vv: floating-point element-wise maximum of vs2 and vs1.
void Riscv64Assembler::VFmax_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5327 
// vfmax.vf: floating-point maximum of each element of vs2 and scalar fs1.
void Riscv64Assembler::VFmax_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b000110, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5334 
// vfredmax.vs: floating-point maximum reduction; vd[0] = max(vs1[0], vs2[*]).
// No `vd != V0` check: reduction destinations may overlap the mask register.
void Riscv64Assembler::VFredmax_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b000111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5340 
// vfsgnj.vv: sign-injection; result takes the magnitude of vs2 and the sign of vs1.
void Riscv64Assembler::VFsgnj_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5347 
// vfsgnj.vf: sign-injection with the sign taken from scalar fs1.
void Riscv64Assembler::VFsgnj_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5354 
// vfsgnjn.vv: sign-injection with the negated sign of vs1 (basis for vfneg.v).
void Riscv64Assembler::VFsgnjn_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5361 
// vfsgnjn.vf: sign-injection with the negated sign of scalar fs1.
void Riscv64Assembler::VFsgnjn_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001001, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5368 
VFneg_v(VRegister vd,VRegister vs)5369 void Riscv64Assembler::VFneg_v(VRegister vd, VRegister vs) { VFsgnjn_vv(vd, vs, vs); }
5370 
// vfsgnjx.vv: sign-injection with the XOR of both signs (basis for vfabs.v).
void Riscv64Assembler::VFsgnjx_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5377 
// vfsgnjx.vf: sign-injection with the XOR of each element's sign and fs1's sign.
void Riscv64Assembler::VFsgnjx_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001010, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5384 
VFabs_v(VRegister vd,VRegister vs)5385 void Riscv64Assembler::VFabs_v(VRegister vd, VRegister vs) { VFsgnjx_vv(vd, vs, vs); }
5386 
// vfslide1up.vf: shift elements up by one; fs1 fills element 0.
void Riscv64Assembler::VFslide1up_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Slide-up destinations must not overlap the source register group.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b001110, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5394 
// vfslide1down.vf: shift elements down by one; fs1 fills the last element.
// Unlike slide-up, vd may overlap vs2, so no overlap check is required.
void Riscv64Assembler::VFslide1down_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b001111, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5401 
// vfmerge.vfm: per-element select between vs2 and scalar fs1 under mask v0.
// This instruction is always masked (vm bit = 0), hence the fixed VM::kV0_t.
void Riscv64Assembler::VFmerge_vfm(VRegister vd, VRegister vs2, FRegister fs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // The destination must not be the mask register itself.
  DCHECK(vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kV0_t);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5408 
// vfmv.v.f: splat scalar fs1 into every element of vd. Shares the merge
// encoding (funct6 0b010111) with vm=1; the vs2 field must be encoded as v0.
void Riscv64Assembler::VFmv_v_f(VRegister vd, FRegister fs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b010111, VM::kUnmasked);
  EmitR(funct7, V0, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5414 
// vmfeq.vv: floating-point compare equal; writes a mask register.
void Riscv64Assembler::VMfeq_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5421 
// vmfeq.vf: floating-point compare equal against scalar fs1; writes a mask.
void Riscv64Assembler::VMfeq_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5428 
// vmfle.vv: floating-point compare vs2 <= vs1; writes a mask register.
void Riscv64Assembler::VMfle_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5435 
// vmfle.vf: floating-point compare vs2 <= fs1; writes a mask register.
void Riscv64Assembler::VMfle_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011001, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5442 
VMfge_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5443 void Riscv64Assembler::VMfge_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5444   VMfle_vv(vd, vs1, vs2, vm);
5445 }
5446 
// vmflt.vv: floating-point compare vs2 < vs1; writes a mask register.
void Riscv64Assembler::VMflt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5453 
// vmflt.vf: floating-point compare vs2 < fs1; writes a mask register.
void Riscv64Assembler::VMflt_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011011, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5460 
VMfgt_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5461 void Riscv64Assembler::VMfgt_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5462   VMflt_vv(vd, vs1, vs2, vm);
5463 }
5464 
// vmfne.vv: floating-point compare not-equal; writes a mask register.
void Riscv64Assembler::VMfne_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5471 
// vmfne.vf: floating-point compare not-equal against scalar fs1; writes a mask.
void Riscv64Assembler::VMfne_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5478 
// vmfgt.vf: floating-point compare vs2 > fs1; writes a mask register.
// Unlike the .vv case, a dedicated encoding (funct6 0b011101) exists.
void Riscv64Assembler::VMfgt_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011101, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5485 
// vmfge.vf: floating-point compare vs2 >= fs1; writes a mask register.
// Unlike the .vv case, a dedicated encoding (funct6 0b011111) exists.
void Riscv64Assembler::VMfge_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b011111, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5492 
VFdiv_vv(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5493 void Riscv64Assembler::VFdiv_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5494   AssertExtensionsEnabled(Riscv64Extension::kV);
5495   const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
5496   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5497 }
5498 
// vfdiv.vf: floating-point divide of each element of vs2 by scalar fs1.
void Riscv64Assembler::VFdiv_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5505 
// vfrdiv.vf: reverse divide; scalar fs1 divided by each element of vs2.
void Riscv64Assembler::VFrdiv_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100001, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5512 
// vfmul.vv: floating-point element-wise multiply, vd = vs2 * vs1.
void Riscv64Assembler::VFmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5519 
// vfmul.vf: floating-point multiply of each element of vs2 by scalar fs1.
void Riscv64Assembler::VFmul_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5526 
// vfrsub.vf: reverse subtract; vd = fs1 - vs2 per element.
void Riscv64Assembler::VFrsub_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b100111, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5533 
// vfmadd.vv: FP fused multiply-add, destination-multiplicand form.
// Parameter order (vd, vs1, vs2) mirrors the assembly operand order.
void Riscv64Assembler::VFmadd_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5540 
// vfmadd.vf: FP fused multiply-add with scalar multiplier fs1, destination-multiplicand form.
void Riscv64Assembler::VFmadd_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5547 
// vfnmadd.vv: negated FP fused multiply-add, destination-multiplicand form.
void Riscv64Assembler::VFnmadd_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5554 
// vfnmadd.vf: negated FP fused multiply-add with scalar multiplier fs1.
void Riscv64Assembler::VFnmadd_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101001, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5561 
// vfmsub.vv: FP fused multiply-subtract, destination-multiplicand form.
void Riscv64Assembler::VFmsub_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5568 
// vfmsub.vf: FP fused multiply-subtract with scalar multiplier fs1.
void Riscv64Assembler::VFmsub_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101010, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5575 
// vfnmsub.vv: negated FP fused multiply-subtract, destination-multiplicand form.
void Riscv64Assembler::VFnmsub_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5582 
// vfnmsub.vf: negated FP fused multiply-subtract with scalar multiplier fs1.
void Riscv64Assembler::VFnmsub_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101011, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5589 
// vfmacc.vv: FP fused multiply-accumulate, accumulator form (vd holds the addend).
void Riscv64Assembler::VFmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5596 
// vfmacc.vf: FP fused multiply-accumulate with scalar multiplier fs1.
void Riscv64Assembler::VFmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5603 
// vfnmacc.vv: negated FP fused multiply-accumulate, accumulator form.
void Riscv64Assembler::VFnmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5610 
// vfnmacc.vf: negated FP fused multiply-accumulate with scalar multiplier fs1.
void Riscv64Assembler::VFnmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101101, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5617 
// vfmsac.vv: FP fused multiply-subtract-accumulator form.
void Riscv64Assembler::VFmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5624 
// vfmsac.vf: FP fused multiply-subtract-accumulator form with scalar multiplier fs1.
void Riscv64Assembler::VFmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101110, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5631 
// vfnmsac.vv: negated FP fused multiply-subtract-accumulator form.
void Riscv64Assembler::VFnmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5638 
// vfnmsac.vf: negated FP fused multiply-subtract-accumulator form, scalar fs1.
void Riscv64Assembler::VFnmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b101111, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5645 
// vfwadd.vv: widening floating-point add; 2*SEW result of SEW sources.
void Riscv64Assembler::VFwadd_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Narrow sources must not overlap the widened destination group.
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5654 
// vfwadd.vf: widening floating-point add of scalar fs1 to vector vs2.
void Riscv64Assembler::VFwadd_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  // A masked write must not target the mask register V0.
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // The narrow source must not overlap the widened destination group.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5662 
// vfwredusum.vs: vd[0] (2*SEW) = sum(vs2[*]) + vs1[0], unordered widening FP reduction.
void Riscv64Assembler::VFwredusum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110001, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5669 
// vfwsub.vv: vd[i] (2*SEW) = vs2[i] - vs1[i] (widening FP subtract).
void Riscv64Assembler::VFwsub_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Wide destination must not overlap the narrow sources.
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5678 
// vfwsub.vf: vd[i] (2*SEW) = vs2[i] - f[fs1].
void Riscv64Assembler::VFwsub_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b110010, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5686 
VFwredosum_vs(VRegister vd,VRegister vs2,VRegister vs1,VM vm)5687 void Riscv64Assembler::VFwredosum_vs(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
5688   AssertExtensionsEnabled(Riscv64Extension::kV);
5689   const uint32_t funct7 = EncodeRVVF7(0b110011, vm);
5690   EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
5691 }
5692 
// vfwadd.wv: vd[i] (2*SEW) = vs2[i] (2*SEW) + vs1[i] (SEW); vs2 is already wide.
void Riscv64Assembler::VFwadd_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Only the narrow source may not overlap the wide destination.
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5700 
// vfwadd.wf: vd[i] (2*SEW) = vs2[i] (2*SEW) + f[fs1].
void Riscv64Assembler::VFwadd_wf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5707 
// vfwsub.wv: vd[i] (2*SEW) = vs2[i] (2*SEW) - vs1[i] (SEW).
void Riscv64Assembler::VFwsub_wv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5715 
// vfwsub.wf: vd[i] (2*SEW) = vs2[i] (2*SEW) - f[fs1].
void Riscv64Assembler::VFwsub_wf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b110110, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5722 
// vfwmul.vv: vd[i] (2*SEW) = vs2[i] * vs1[i] (widening FP multiply).
void Riscv64Assembler::VFwmul_vv(VRegister vd, VRegister vs2, VRegister vs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5731 
// vfwmul.vf: vd[i] (2*SEW) = vs2[i] * f[fs1].
void Riscv64Assembler::VFwmul_vf(VRegister vd, VRegister vs2, FRegister fs1, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111000, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5739 
// vfwmacc.vv: vd[i] (2*SEW) = +(vs1[i] * vs2[i]) + vd[i] (widening FP multiply-accumulate).
void Riscv64Assembler::VFwmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5748 
// vfwmacc.vf: vd[i] (2*SEW) = +(f[fs1] * vs2[i]) + vd[i].
void Riscv64Assembler::VFwmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111100, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5756 
// vfwnmacc.vv: vd[i] (2*SEW) = -(vs1[i] * vs2[i]) - vd[i].
void Riscv64Assembler::VFwnmacc_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5765 
// vfwnmacc.vf: vd[i] (2*SEW) = -(f[fs1] * vs2[i]) - vd[i].
void Riscv64Assembler::VFwnmacc_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111101, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5773 
// vfwmsac.vv: vd[i] (2*SEW) = +(vs1[i] * vs2[i]) - vd[i].
void Riscv64Assembler::VFwmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111110, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5782 
// vfwmsac.vf: vd[i] (2*SEW) = +(f[fs1] * vs2[i]) - vd[i].
void Riscv64Assembler::VFwmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111110, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5790 
// vfwnmsac.vv: vd[i] (2*SEW) = -(vs1[i] * vs2[i]) + vd[i].
void Riscv64Assembler::VFwnmsac_vv(VRegister vd, VRegister vs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs1);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
  EmitR(funct7, vs2, vs1, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5799 
// vfwnmsac.vf: vd[i] (2*SEW) = -(f[fs1] * vs2[i]) + vd[i].
void Riscv64Assembler::VFwnmsac_vf(VRegister vd, FRegister fs1, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b111111, vm);
  EmitR(funct7, vs2, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5807 
// vmv.s.x: vd[0] = x[rs1]; other elements unchanged. Always unmasked, vs2 field is 0.
void Riscv64Assembler::VMv_s_x(VRegister vd, XRegister rs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
  EmitR(funct7, 0b00000, rs1, enum_cast<uint32_t>(VAIEncoding::kOPMVX), vd, 0x57);
}
5813 
// vmv.x.s: x[rd] = vs2[0] (sign-extended). Always unmasked, vs1 field is 0.
void Riscv64Assembler::VMv_x_s(XRegister rd, VRegister vs2) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
  EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPMVV), rd, 0x57);
}
5819 
// vcpop.m: x[rd] = number of set bits in mask register vs2 (under mask vm).
void Riscv64Assembler::VCpop_m(XRegister rd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b010000, vm);
  EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPMVV), rd, 0x57);
}
5825 
// vfirst.m: x[rd] = index of first set mask bit in vs2, or -1 if none.
void Riscv64Assembler::VFirst_m(XRegister rd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b010000, vm);
  EmitR(funct7, vs2, 0b10001, enum_cast<uint32_t>(VAIEncoding::kOPMVV), rd, 0x57);
}
5831 
// vzext.vf8: zero-extend SEW/8-wide source elements of vs2 to SEW in vd.
void Riscv64Assembler::VZext_vf8(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00010, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5838 
// vsext.vf8: sign-extend SEW/8-wide source elements of vs2 to SEW in vd.
void Riscv64Assembler::VSext_vf8(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00011, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5845 
// vzext.vf4: zero-extend SEW/4-wide source elements of vs2 to SEW in vd.
void Riscv64Assembler::VZext_vf4(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00100, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5852 
// vsext.vf4: sign-extend SEW/4-wide source elements of vs2 to SEW in vd.
void Riscv64Assembler::VSext_vf4(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00101, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5859 
// vzext.vf2: zero-extend SEW/2-wide source elements of vs2 to SEW in vd.
void Riscv64Assembler::VZext_vf2(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00110, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5866 
// vsext.vf2: sign-extend SEW/2-wide source elements of vs2 to SEW in vd.
void Riscv64Assembler::VSext_vf2(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00111, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
5873 
// vfmv.s.f: vd[0] = f[fs1]; other elements unchanged. Always unmasked.
void Riscv64Assembler::VFmv_s_f(VRegister vd, FRegister fs1) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
  EmitR(funct7, 0b00000, fs1, enum_cast<uint32_t>(VAIEncoding::kOPFVF), vd, 0x57);
}
5879 
// vfmv.f.s: f[fd] = vs2[0]. Always unmasked.
void Riscv64Assembler::VFmv_f_s(FRegister fd, VRegister vs2) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  const uint32_t funct7 = EncodeRVVF7(0b010000, VM::kUnmasked);
  EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), fd, 0x57);
}
5885 
// vfcvt.xu.f.v: convert FP to unsigned integer, same SEW.
void Riscv64Assembler::VFcvt_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5892 
// vfcvt.x.f.v: convert FP to signed integer, same SEW.
void Riscv64Assembler::VFcvt_x_f_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00001, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5899 
// vfcvt.f.xu.v: convert unsigned integer to FP, same SEW.
void Riscv64Assembler::VFcvt_f_xu_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00010, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5906 
// vfcvt.f.x.v: convert signed integer to FP, same SEW.
void Riscv64Assembler::VFcvt_f_x_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00011, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5913 
// vfcvt.rtz.xu.f.v: FP to unsigned integer, truncating (round toward zero).
void Riscv64Assembler::VFcvt_rtz_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00110, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5920 
// vfcvt.rtz.x.f.v: FP to signed integer, truncating (round toward zero).
void Riscv64Assembler::VFcvt_rtz_x_f_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b00111, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5927 
// vfwcvt.xu.f.v: FP to unsigned integer, widening to 2*SEW.
void Riscv64Assembler::VFwcvt_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Wide destination must not overlap the narrow source.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b01000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5935 
// vfwcvt.x.f.v: FP to signed integer, widening to 2*SEW.
void Riscv64Assembler::VFwcvt_x_f_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b01001, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5943 
// vfwcvt.f.xu.v: unsigned integer to FP, widening to 2*SEW.
void Riscv64Assembler::VFwcvt_f_xu_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b01010, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5951 
// vfwcvt.f.x.v: signed integer to FP, widening to 2*SEW.
void Riscv64Assembler::VFwcvt_f_x_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b01011, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5959 
// vfwcvt.f.f.v: FP to FP, widening to 2*SEW.
void Riscv64Assembler::VFwcvt_f_f_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b01100, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5967 
// vfwcvt.rtz.xu.f.v: FP to unsigned integer, widening, truncating.
void Riscv64Assembler::VFwcvt_rtz_xu_f_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b01110, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5975 
// vfwcvt.rtz.x.f.v: FP to signed integer, widening, truncating.
void Riscv64Assembler::VFwcvt_rtz_x_f_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b01111, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5983 
// vfncvt.xu.f.w: FP (2*SEW) to unsigned integer (SEW), narrowing.
void Riscv64Assembler::VFncvt_xu_f_w(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5990 
// vfncvt.x.f.w: FP (2*SEW) to signed integer (SEW), narrowing.
void Riscv64Assembler::VFncvt_x_f_w(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b10001, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
5997 
// vfncvt.f.xu.w: unsigned integer (2*SEW) to FP (SEW), narrowing.
void Riscv64Assembler::VFncvt_f_xu_w(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b10010, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6004 
// vfncvt.f.x.w: signed integer (2*SEW) to FP (SEW), narrowing.
void Riscv64Assembler::VFncvt_f_x_w(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b10011, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6011 
// vfncvt.f.f.w: FP (2*SEW) to FP (SEW), narrowing.
void Riscv64Assembler::VFncvt_f_f_w(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b10100, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6018 
// vfncvt.rod.f.f.w: FP narrowing conversion, rounding toward odd.
void Riscv64Assembler::VFncvt_rod_f_f_w(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b10101, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6025 
// vfncvt.rtz.xu.f.w: FP (2*SEW) to unsigned integer (SEW), truncating.
void Riscv64Assembler::VFncvt_rtz_xu_f_w(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b10110, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6032 
// vfncvt.rtz.x.f.w: FP (2*SEW) to signed integer (SEW), truncating.
void Riscv64Assembler::VFncvt_rtz_x_f_w(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010010, vm);
  EmitR(funct7, vs2, 0b10111, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6039 
// vfsqrt.v: vd[i] = sqrt(vs2[i]).
void Riscv64Assembler::VFsqrt_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
  EmitR(funct7, vs2, 0b00000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6046 
// vfrsqrt7.v: vd[i] = approximate reciprocal square root of vs2[i] (7-bit precision).
void Riscv64Assembler::VFrsqrt7_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
  EmitR(funct7, vs2, 0b00100, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6053 
// vfrec7.v: vd[i] = approximate reciprocal of vs2[i] (7-bit precision).
void Riscv64Assembler::VFrec7_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
  EmitR(funct7, vs2, 0b00101, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6060 
// vfclass.v: vd[i] = 10-bit FP classification mask of vs2[i].
void Riscv64Assembler::VFclass_v(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010011, vm);
  EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPFVV), vd, 0x57);
}
6067 
// vmsbf.m: set-before-first — vd bits before the first set bit of vs2 become 1, rest 0.
void Riscv64Assembler::VMsbf_m(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  // Destination may not overlap the source mask.
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
  EmitR(funct7, vs2, 0b00001, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
6075 
// vmsof.m: set-only-first — only the first set bit of vs2 becomes 1 in vd.
void Riscv64Assembler::VMsof_m(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
  EmitR(funct7, vs2, 0b00010, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
6083 
// vmsif.m: set-including-first — vd bits up to and including the first set bit of vs2 become 1.
void Riscv64Assembler::VMsif_m(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
  EmitR(funct7, vs2, 0b00011, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
6091 
// viota.m: vd[i] = count of set mask bits in vs2 below position i (exclusive prefix sum).
void Riscv64Assembler::VIota_m(VRegister vd, VRegister vs2, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  DCHECK(vd != vs2);
  const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
  EmitR(funct7, vs2, 0b10000, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
6099 
// vid.v: vd[i] = i (write element indices). The vs2 field is encoded as 0 (register V0).
void Riscv64Assembler::VId_v(VRegister vd, VM vm) {
  AssertExtensionsEnabled(Riscv64Extension::kV);
  DCHECK_IMPLIES(vm == VM::kV0_t, vd != V0);
  const uint32_t funct7 = EncodeRVVF7(0b010100, vm);
  EmitR(funct7, V0, 0b10001, enum_cast<uint32_t>(VAIEncoding::kOPMVV), vd, 0x57);
}
6106 
6107 /////////////////////////////// RVV Arithmetic Instructions  END   /////////////////////////////
6108 
6109 ////////////////////////////// RV64 MACRO Instructions  START ///////////////////////////////
6110 
6111 // Pseudo instructions
6112 
// nop — canonical encoding is addi zero, zero, 0.
void Riscv64Assembler::Nop() { Addi(Zero, Zero, 0); }
6114 
// li rd, imm — materialize an arbitrary 64-bit immediate without using a temporary register.
void Riscv64Assembler::Li(XRegister rd, int64_t imm) {
  LoadImmediate(rd, imm, /*can_use_tmp=*/ false);
}
6118 
// mv rd, rs — encoded as addi rd, rs, 0.
void Riscv64Assembler::Mv(XRegister rd, XRegister rs) { Addi(rd, rs, 0); }
6120 
// not rd, rs — bitwise complement, encoded as xori rd, rs, -1.
void Riscv64Assembler::Not(XRegister rd, XRegister rs) { Xori(rd, rs, -1); }
6122 
// neg rd, rs — two's-complement negate, encoded as sub rd, zero, rs.
void Riscv64Assembler::Neg(XRegister rd, XRegister rs) { Sub(rd, Zero, rs); }
6124 
// negw rd, rs — 32-bit negate (result sign-extended), encoded as subw rd, zero, rs.
void Riscv64Assembler::NegW(XRegister rd, XRegister rs) { Subw(rd, Zero, rs); }
6126 
SextB(XRegister rd,XRegister rs)6127 void Riscv64Assembler::SextB(XRegister rd, XRegister rs) {
6128   if (IsExtensionEnabled(Riscv64Extension::kZbb)) {
6129     if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6130       CSextB(rd);
6131     } else {
6132       ZbbSextB(rd, rs);
6133     }
6134   } else {
6135     Slli(rd, rs, kXlen - 8u);
6136     Srai(rd, rd, kXlen - 8u);
6137   }
6138 }
6139 
SextH(XRegister rd,XRegister rs)6140 void Riscv64Assembler::SextH(XRegister rd, XRegister rs) {
6141   if (IsExtensionEnabled(Riscv64Extension::kZbb)) {
6142     if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6143       CSextH(rd);
6144     } else {
6145       ZbbSextH(rd, rs);
6146     }
6147   } else {
6148     Slli(rd, rs, kXlen - 16u);
6149     Srai(rd, rd, kXlen - 16u);
6150   }
6151 }
6152 
SextW(XRegister rd,XRegister rs)6153 void Riscv64Assembler::SextW(XRegister rd, XRegister rs) {
6154   if (IsExtensionEnabled(Riscv64Extension::kZca) && rd != Zero && (rd == rs || rs == Zero)) {
6155     if (rd == rs) {
6156       CAddiw(rd, 0);
6157     } else {
6158       CLi(rd, 0);
6159     }
6160   } else {
6161     Addiw(rd, rs, 0);
6162   }
6163 }
6164 
ZextB(XRegister rd,XRegister rs)6165 void Riscv64Assembler::ZextB(XRegister rd, XRegister rs) {
6166   if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6167     CZextB(rd);
6168   } else {
6169     Andi(rd, rs, 0xff);
6170   }
6171 }
6172 
ZextH(XRegister rd,XRegister rs)6173 void Riscv64Assembler::ZextH(XRegister rd, XRegister rs) {
6174   if (IsExtensionEnabled(Riscv64Extension::kZbb)) {
6175     if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6176       CZextH(rd);
6177     } else {
6178       ZbbZextH(rd, rs);
6179     }
6180   } else {
6181     Slli(rd, rs, kXlen - 16u);
6182     Srli(rd, rd, kXlen - 16u);
6183   }
6184 }
6185 
ZextW(XRegister rd,XRegister rs)6186 void Riscv64Assembler::ZextW(XRegister rd, XRegister rs) {
6187   if (IsExtensionEnabled(Riscv64Extension::kZba)) {
6188     if (IsExtensionEnabled(Riscv64Extension::kZcb) && rd == rs && IsShortReg(rd)) {
6189       CZextW(rd);
6190     } else {
6191       AddUw(rd, rs, Zero);
6192     }
6193   } else {
6194     Slli(rd, rs, kXlen - 32u);
6195     Srli(rd, rd, kXlen - 32u);
6196   }
6197 }
6198 
// seqz rd, rs — rd = (rs == 0), encoded as sltiu rd, rs, 1.
void Riscv64Assembler::Seqz(XRegister rd, XRegister rs) { Sltiu(rd, rs, 1); }
6200 
// snez rd, rs — rd = (rs != 0), encoded as sltu rd, zero, rs.
void Riscv64Assembler::Snez(XRegister rd, XRegister rs) { Sltu(rd, Zero, rs); }
6202 
// sltz rd, rs — rd = (rs < 0, signed).
void Riscv64Assembler::Sltz(XRegister rd, XRegister rs) { Slt(rd, rs, Zero); }
6204 
// sgtz rd, rs — rd = (rs > 0, signed).
void Riscv64Assembler::Sgtz(XRegister rd, XRegister rs) { Slt(rd, Zero, rs); }
6206 
// fmv.s rd, rs — single-precision FP move, encoded as fsgnj.s rd, rs, rs.
void Riscv64Assembler::FMvS(FRegister rd, FRegister rs) { FSgnjS(rd, rs, rs); }
6208 
// fabs.s rd, rs — single-precision absolute value, encoded as fsgnjx.s rd, rs, rs.
void Riscv64Assembler::FAbsS(FRegister rd, FRegister rs) { FSgnjxS(rd, rs, rs); }
6210 
// fneg.s rd, rs — single-precision negate, encoded as fsgnjn.s rd, rs, rs.
void Riscv64Assembler::FNegS(FRegister rd, FRegister rs) { FSgnjnS(rd, rs, rs); }
6212 
// fmv.d rd, rs — double-precision FP move, encoded as fsgnj.d rd, rs, rs.
void Riscv64Assembler::FMvD(FRegister rd, FRegister rs) { FSgnjD(rd, rs, rs); }
6214 
// fabs.d rd, rs — double-precision absolute value, encoded as fsgnjx.d rd, rs, rs.
void Riscv64Assembler::FAbsD(FRegister rd, FRegister rs) { FSgnjxD(rd, rs, rs); }
6216 
// fneg.d rd, rs — double-precision negate, encoded as fsgnjn.d rd, rs, rs.
void Riscv64Assembler::FNegD(FRegister rd, FRegister rs) { FSgnjnD(rd, rs, rs); }
6218 
// beqz rs, offset — branch if rs == 0.
void Riscv64Assembler::Beqz(XRegister rs, int32_t offset) {
  Beq(rs, Zero, offset);
}
6222 
// bnez rs, offset — branch if rs != 0.
void Riscv64Assembler::Bnez(XRegister rs, int32_t offset) {
  Bne(rs, Zero, offset);
}
6226 
// blez rt, offset — branch if rt <= 0; encoded as bge zero, rt.
void Riscv64Assembler::Blez(XRegister rt, int32_t offset) {
  Bge(Zero, rt, offset);
}
6230 
// bgez rt, offset — branch if rt >= 0.
void Riscv64Assembler::Bgez(XRegister rt, int32_t offset) {
  Bge(rt, Zero, offset);
}
6234 
// bltz rt, offset — branch if rt < 0.
void Riscv64Assembler::Bltz(XRegister rt, int32_t offset) {
  Blt(rt, Zero, offset);
}
6238 
// bgtz rt, offset — branch if rt > 0; encoded as blt zero, rt.
void Riscv64Assembler::Bgtz(XRegister rt, int32_t offset) {
  Blt(Zero, rt, offset);
}
6242 
// bgt rs, rt, offset — signed branch if rs > rt; blt with swapped operands.
void Riscv64Assembler::Bgt(XRegister rs, XRegister rt, int32_t offset) {
  Blt(rt, rs, offset);
}
6246 
// ble rs, rt, offset — signed branch if rs <= rt; bge with swapped operands.
void Riscv64Assembler::Ble(XRegister rs, XRegister rt, int32_t offset) {
  Bge(rt, rs, offset);
}
6250 
// bgtu rs, rt, offset — unsigned branch if rs > rt; bltu with swapped operands.
void Riscv64Assembler::Bgtu(XRegister rs, XRegister rt, int32_t offset) {
  Bltu(rt, rs, offset);
}
6254 
// bleu rs, rt, offset — unsigned branch if rs <= rt; bgeu with swapped operands.
void Riscv64Assembler::Bleu(XRegister rs, XRegister rt, int32_t offset) {
  Bgeu(rt, rs, offset);
}
6258 
// j offset — unconditional jump, discarding the return address (jal zero).
void Riscv64Assembler::J(int32_t offset) { Jal(Zero, offset); }
6260 
// jal offset — call, linking the return address in ra.
void Riscv64Assembler::Jal(int32_t offset) { Jal(RA, offset); }
6262 
// jr rs — indirect jump, discarding the return address.
void Riscv64Assembler::Jr(XRegister rs) { Jalr(Zero, rs, 0); }
6264 
// jalr rs — indirect call, linking the return address in ra.
void Riscv64Assembler::Jalr(XRegister rs) { Jalr(RA, rs, 0); }
6266 
// jalr rd, rs — indirect call with explicit link register and zero offset.
void Riscv64Assembler::Jalr(XRegister rd, XRegister rs) { Jalr(rd, rs, 0); }
6268 
// ret — return from a call, encoded as jalr zero, ra, 0.
void Riscv64Assembler::Ret() { Jalr(Zero, RA, 0); }
6270 
// rdcycle rd — read the cycle counter CSR (0xC00).
void Riscv64Assembler::RdCycle(XRegister rd) {
  Csrrs(rd, 0xc00, Zero);
}
6274 
// rdtime rd — read the wall-clock timer CSR (0xC01).
void Riscv64Assembler::RdTime(XRegister rd) {
  Csrrs(rd, 0xc01, Zero);
}
6278 
// rdinstret rd — read the instructions-retired counter CSR (0xC02).
void Riscv64Assembler::RdInstret(XRegister rd) {
  Csrrs(rd, 0xc02, Zero);
}
6282 
// csrr rd, csr — read a CSR without side effects (csrrs with rs1 = zero).
void Riscv64Assembler::Csrr(XRegister rd, uint32_t csr) {
  Csrrs(rd, csr, Zero);
}
6286 
// csrw csr, rs — write a CSR, discarding the old value (rd = zero).
void Riscv64Assembler::Csrw(uint32_t csr, XRegister rs) {
  Csrrw(Zero, csr, rs);
}
6290 
// csrs csr, rs — set CSR bits given in rs, discarding the old value.
void Riscv64Assembler::Csrs(uint32_t csr, XRegister rs) {
  Csrrs(Zero, csr, rs);
}
6294 
// csrc csr, rs — clear CSR bits given in rs, discarding the old value.
void Riscv64Assembler::Csrc(uint32_t csr, XRegister rs) {
  Csrrc(Zero, csr, rs);
}
6298 
// csrwi csr, uimm5 — write a 5-bit immediate to a CSR, discarding the old value.
void Riscv64Assembler::Csrwi(uint32_t csr, uint32_t uimm5) {
  Csrrwi(Zero, csr, uimm5);
}
6302 
// csrsi csr, uimm5 — set CSR bits given by a 5-bit immediate.
void Riscv64Assembler::Csrsi(uint32_t csr, uint32_t uimm5) {
  Csrrsi(Zero, csr, uimm5);
}
6306 
Csrci(uint32_t csr,uint32_t uimm5)6307 void Riscv64Assembler::Csrci(uint32_t csr, uint32_t uimm5) {
6308   Csrrci(Zero, csr, uimm5);
6309 }
6310 
// The `Load*`/`Store*`/`FLoad*`/`FStore*` macros below handle arbitrary 32-bit
// offsets: `LoadFromOffset`/`StoreToOffset`/... materialize an out-of-range
// offset through a scratch register before emitting the given instruction.

// Load a sign-extended byte from `rs1 + offset` into `rd`.
void Riscv64Assembler::Loadb(XRegister rd, XRegister rs1, int32_t offset) {
  LoadFromOffset<&Riscv64Assembler::Lb>(rd, rs1, offset);
}

// Load a sign-extended halfword from `rs1 + offset` into `rd`.
void Riscv64Assembler::Loadh(XRegister rd, XRegister rs1, int32_t offset) {
  LoadFromOffset<&Riscv64Assembler::Lh>(rd, rs1, offset);
}

// Load a sign-extended word from `rs1 + offset` into `rd`.
void Riscv64Assembler::Loadw(XRegister rd, XRegister rs1, int32_t offset) {
  LoadFromOffset<&Riscv64Assembler::Lw>(rd, rs1, offset);
}

// Load a doubleword from `rs1 + offset` into `rd`.
void Riscv64Assembler::Loadd(XRegister rd, XRegister rs1, int32_t offset) {
  LoadFromOffset<&Riscv64Assembler::Ld>(rd, rs1, offset);
}

// Load a zero-extended byte from `rs1 + offset` into `rd`.
void Riscv64Assembler::Loadbu(XRegister rd, XRegister rs1, int32_t offset) {
  LoadFromOffset<&Riscv64Assembler::Lbu>(rd, rs1, offset);
}

// Load a zero-extended halfword from `rs1 + offset` into `rd`.
void Riscv64Assembler::Loadhu(XRegister rd, XRegister rs1, int32_t offset) {
  LoadFromOffset<&Riscv64Assembler::Lhu>(rd, rs1, offset);
}

// Load a zero-extended word from `rs1 + offset` into `rd`.
void Riscv64Assembler::Loadwu(XRegister rd, XRegister rs1, int32_t offset) {
  LoadFromOffset<&Riscv64Assembler::Lwu>(rd, rs1, offset);
}

// Store the low byte of `rs2` to `rs1 + offset`.
void Riscv64Assembler::Storeb(XRegister rs2, XRegister rs1, int32_t offset) {
  StoreToOffset<&Riscv64Assembler::Sb>(rs2, rs1, offset);
}

// Store the low halfword of `rs2` to `rs1 + offset`.
void Riscv64Assembler::Storeh(XRegister rs2, XRegister rs1, int32_t offset) {
  StoreToOffset<&Riscv64Assembler::Sh>(rs2, rs1, offset);
}

// Store the low word of `rs2` to `rs1 + offset`.
void Riscv64Assembler::Storew(XRegister rs2, XRegister rs1, int32_t offset) {
  StoreToOffset<&Riscv64Assembler::Sw>(rs2, rs1, offset);
}

// Store the doubleword `rs2` to `rs1 + offset`.
void Riscv64Assembler::Stored(XRegister rs2, XRegister rs1, int32_t offset) {
  StoreToOffset<&Riscv64Assembler::Sd>(rs2, rs1, offset);
}

// Load a single-precision FP value from `rs1 + offset` into `rd`.
void Riscv64Assembler::FLoadw(FRegister rd, XRegister rs1, int32_t offset) {
  FLoadFromOffset<&Riscv64Assembler::FLw>(rd, rs1, offset);
}

// Load a double-precision FP value from `rs1 + offset` into `rd`.
void Riscv64Assembler::FLoadd(FRegister rd, XRegister rs1, int32_t offset) {
  FLoadFromOffset<&Riscv64Assembler::FLd>(rd, rs1, offset);
}

// Store the single-precision FP value `rs2` to `rs1 + offset`.
void Riscv64Assembler::FStorew(FRegister rs2, XRegister rs1, int32_t offset) {
  FStoreToOffset<&Riscv64Assembler::FSw>(rs2, rs1, offset);
}

// Store the double-precision FP value `rs2` to `rs1 + offset`.
void Riscv64Assembler::FStored(FRegister rs2, XRegister rs1, int32_t offset) {
  FStoreToOffset<&Riscv64Assembler::FSd>(rs2, rs1, offset);
}
6370 
// Materialize the 32-bit constant `value` in `rd`.
void Riscv64Assembler::LoadConst32(XRegister rd, int32_t value) {
  // No need to use a temporary register for 32-bit values.
  LoadImmediate(rd, value, /*can_use_tmp=*/ false);
}

// Materialize the 64-bit constant `value` in `rd`; a scratch register may be
// used to shorten the instruction sequence.
void Riscv64Assembler::LoadConst64(XRegister rd, int64_t value) {
  LoadImmediate(rd, value, /*can_use_tmp=*/ true);
}
6379 
// Shared implementation for `AddConst32()`/`AddConst64()`: add the constant
// `value` to `rs1` and place the result in `rd`.
//
// `addi` emits a single add-immediate (ADDIW or ADDI); `add_large` emits the
// general sequence (load constant into `tmp`, then register-register add).
// Values that fit in 12 bits take one instruction; values within twice the
// 12-bit range take two chained add-immediates; everything else falls back to
// `add_large`.
template <typename ValueType, typename Addi, typename AddLarge>
void AddConstImpl(Riscv64Assembler* assembler,
                  XRegister rd,
                  XRegister rs1,
                  ValueType value,
                  Addi&& addi,
                  AddLarge&& add_large) {
  ScratchRegisterScope srs(assembler);
  // A temporary must be available for adjustment even if it's not needed.
  // However, `rd` can be used as the temporary unless it's the same as `rs1` or SP.
  DCHECK_IMPLIES(rd == rs1 || rd == SP, srs.AvailableXRegisters() != 0u);

  if (IsInt<12>(value)) {
    // Fits in one add-immediate.
    addi(rd, rs1, value);
    return;
  }

  // Bounds for the two-ADDI fast path: the first ADDI applies the maximum
  // (or minimum) 12-bit adjustment, the second applies the remainder.
  constexpr int32_t kPositiveValueSimpleAdjustment = 0x7ff;
  constexpr int32_t kHighestValueForSimpleAdjustment = 2 * kPositiveValueSimpleAdjustment;
  constexpr int32_t kNegativeValueSimpleAdjustment = -0x800;
  constexpr int32_t kLowestValueForSimpleAdjustment = 2 * kNegativeValueSimpleAdjustment;

  if (rd != rs1 && rd != SP) {
    // `rd` can safely serve as the temporary.
    srs.IncludeXRegister(rd);
  }
  XRegister tmp = srs.AllocateXRegister();
  if (value >= 0 && value <= kHighestValueForSimpleAdjustment) {
    addi(tmp, rs1, kPositiveValueSimpleAdjustment);
    addi(rd, tmp, value - kPositiveValueSimpleAdjustment);
  } else if (value < 0 && value >= kLowestValueForSimpleAdjustment) {
    addi(tmp, rs1, kNegativeValueSimpleAdjustment);
    addi(rd, tmp, value - kNegativeValueSimpleAdjustment);
  } else {
    // General case: materialize `value` in `tmp` and add.
    add_large(rd, rs1, value, tmp);
  }
}
6416 
// Add the 32-bit constant `value` to `rs1` and store the (sign-extended
// 32-bit) result in `rd`, using ADDIW/ADDW.
void Riscv64Assembler::AddConst32(XRegister rd, XRegister rs1, int32_t value) {
  // Operands must not be scratch registers; `AddConstImpl()` allocates its own.
  CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
  CHECK_EQ((1u << rd) & available_scratch_core_registers_, 0u);
  auto addiw = [&](XRegister rd, XRegister rs1, int32_t value) { Addiw(rd, rs1, value); };
  auto add_large = [&](XRegister rd, XRegister rs1, int32_t value, XRegister tmp) {
    LoadConst32(tmp, value);
    Addw(rd, rs1, tmp);
  };
  AddConstImpl(this, rd, rs1, value, addiw, add_large);
}
6427 
// Add the 64-bit constant `value` to `rs1` and store the result in `rd`,
// using ADDI/ADD.
void Riscv64Assembler::AddConst64(XRegister rd, XRegister rs1, int64_t value) {
  // Operands must not be scratch registers; `AddConstImpl()` allocates its own.
  CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
  CHECK_EQ((1u << rd) & available_scratch_core_registers_, 0u);
  auto addi = [&](XRegister rd, XRegister rs1, int32_t value) { Addi(rd, rs1, value); };
  auto add_large = [&](XRegister rd, XRegister rs1, int64_t value, XRegister tmp) {
    // We may not have another scratch register for `LoadConst64()`, so use `Li()`.
    // TODO(riscv64): Refactor `LoadImmediate()` so that we can reuse the code to detect
    // when the code path using the scratch reg is beneficial, and use that path with a
    // small modification - instead of adding the two parts togeter, add them individually
    // to the input `rs1`. (This works as long as `rd` is not the same as `tmp`.)
    Li(tmp, value);
    Add(rd, rs1, tmp);
  };
  AddConstImpl(this, rd, rs1, value, addi, add_large);
}
6443 
// Compare-against-zero branch pseudo-instructions targeting a label. Each
// delegates to the corresponding two-register form with `Zero` as the second
// operand. `is_bare` requests a fixed-size (non-promotable) encoding.

// Branch to `label` if `rs` == 0.
void Riscv64Assembler::Beqz(XRegister rs, Riscv64Label* label, bool is_bare) {
  Beq(rs, Zero, label, is_bare);
}

// Branch to `label` if `rs` != 0.
void Riscv64Assembler::Bnez(XRegister rs, Riscv64Label* label, bool is_bare) {
  Bne(rs, Zero, label, is_bare);
}

// Branch to `label` if `rs` <= 0 (signed).
void Riscv64Assembler::Blez(XRegister rs, Riscv64Label* label, bool is_bare) {
  Ble(rs, Zero, label, is_bare);
}

// Branch to `label` if `rs` >= 0 (signed).
void Riscv64Assembler::Bgez(XRegister rs, Riscv64Label* label, bool is_bare) {
  Bge(rs, Zero, label, is_bare);
}

// Branch to `label` if `rs` < 0 (signed).
void Riscv64Assembler::Bltz(XRegister rs, Riscv64Label* label, bool is_bare) {
  Blt(rs, Zero, label, is_bare);
}

// Branch to `label` if `rs` > 0 (signed).
void Riscv64Assembler::Bgtz(XRegister rs, Riscv64Label* label, bool is_bare) {
  Bgt(rs, Zero, label, is_bare);
}
6467 
// Two-register conditional branches targeting a label; all delegate to
// `Bcond()` with the matching `BranchCondition`. `is_bare` requests a
// fixed-size (non-promotable) encoding.

void Riscv64Assembler::Beq(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondEQ, rs, rt);
}

void Riscv64Assembler::Bne(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondNE, rs, rt);
}

void Riscv64Assembler::Ble(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondLE, rs, rt);
}

void Riscv64Assembler::Bge(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondGE, rs, rt);
}

void Riscv64Assembler::Blt(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondLT, rs, rt);
}

void Riscv64Assembler::Bgt(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondGT, rs, rt);
}

void Riscv64Assembler::Bleu(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondLEU, rs, rt);
}

void Riscv64Assembler::Bgeu(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondGEU, rs, rt);
}

void Riscv64Assembler::Bltu(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondLTU, rs, rt);
}

void Riscv64Assembler::Bgtu(XRegister rs, XRegister rt, Riscv64Label* label, bool is_bare) {
  Bcond(label, is_bare, kCondGTU, rs, rt);
}
6507 
// Unconditional jump-and-link to `label`, writing the return address to `rd`.
void Riscv64Assembler::Jal(XRegister rd, Riscv64Label* label, bool is_bare) {
  Buncond(label, rd, is_bare);
}

// Pseudo-instruction: unconditional jump to `label` (link discarded).
void Riscv64Assembler::J(Riscv64Label* label, bool is_bare) {
  Jal(Zero, label, is_bare);
}

// Pseudo-instruction: call `label`, linking in RA.
void Riscv64Assembler::Jal(Riscv64Label* label, bool is_bare) {
  Jal(RA, label, is_bare);
}
6519 
// PC-relative literal loads. Each records a literal-load "branch" that is
// materialized when literals are emitted; the literal size must match the
// access width.

// Load a sign-extended 32-bit literal into `rd`.
void Riscv64Assembler::Loadw(XRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  LoadLiteral(literal, rd, Branch::kLiteral);
}

// Load a zero-extended 32-bit literal into `rd`.
void Riscv64Assembler::Loadwu(XRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  LoadLiteral(literal, rd, Branch::kLiteralUnsigned);
}

// Load a 64-bit literal into `rd`.
void Riscv64Assembler::Loadd(XRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  LoadLiteral(literal, rd, Branch::kLiteralLong);
}

// Load a 32-bit literal into FP register `rd`.
void Riscv64Assembler::FLoadw(FRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 4u);
  LoadLiteral(literal, rd, Branch::kLiteralFloat);
}

// Load a 64-bit literal into FP register `rd`.
void Riscv64Assembler::FLoadd(FRegister rd, Literal* literal) {
  DCHECK_EQ(literal->GetSize(), 8u);
  LoadLiteral(literal, rd, Branch::kLiteralDouble);
}
6544 
// Emit a guaranteed-illegal instruction (UNIMP). With the Zca extension, use
// the 16-bit C.UNIMP; otherwise emit 0xC0001073, i.e. `csrrw zero, cycle, zero`
// (a write to a read-only CSR), which is the canonical 32-bit UNIMP encoding.
void Riscv64Assembler::Unimp() {
  if (IsExtensionEnabled(Riscv64Extension::kZca)) {
    CUnimp();
  } else {
    Emit32(0xC0001073);
  }
}
6552 
6553 /////////////////////////////// RV64 MACRO Instructions END ///////////////////////////////
6554 
// Per-branch-type encoding info. Entry order must match the `Branch::Type`
// enum. Fields: {length (bytes of emitted code), pc_offset (offset of the
// PC-relative instruction within the sequence), offset_size (max encodable
// offset width)}.
const Riscv64Assembler::Branch::BranchInfo Riscv64Assembler::Branch::branch_info_[] = {
    // Compressed branches (can be promoted to longer)
    {2, 0, Riscv64Assembler::Branch::kOffset9},   // kCondCBranch
    {2, 0, Riscv64Assembler::Branch::kOffset12},  // kUncondCBranch
    // Compressed branches (can't be promoted to longer)
    {2, 0, Riscv64Assembler::Branch::kOffset9},   // kBareCondCBranch
    {2, 0, Riscv64Assembler::Branch::kOffset12},  // kBareUncondCBranch

    // Short branches (can be promoted to longer).
    {4, 0, Riscv64Assembler::Branch::kOffset13},  // kCondBranch
    {4, 0, Riscv64Assembler::Branch::kOffset21},  // kUncondBranch
    {4, 0, Riscv64Assembler::Branch::kOffset21},  // kCall
    // Short branches (can't be promoted to longer).
    {4, 0, Riscv64Assembler::Branch::kOffset13},  // kBareCondBranch
    {4, 0, Riscv64Assembler::Branch::kOffset21},  // kBareUncondBranch
    {4, 0, Riscv64Assembler::Branch::kOffset21},  // kBareCall

    // Medium branches.
    {6, 2, Riscv64Assembler::Branch::kOffset21},  // kCondCBranch21
    {8, 4, Riscv64Assembler::Branch::kOffset21},  // kCondBranch21

    // Long branches.
    {12, 4, Riscv64Assembler::Branch::kOffset32},  // kLongCondBranch
    {8, 0, Riscv64Assembler::Branch::kOffset32},   // kLongUncondBranch
    {8, 0, Riscv64Assembler::Branch::kOffset32},   // kLongCall

    // label.
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLabel

    // literals.
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteral
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteralUnsigned
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteralLong
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteralFloat
    {8, 0, Riscv64Assembler::Branch::kOffset32},  // kLiteralDouble
};
6591 
InitShortOrLong(Riscv64Assembler::Branch::OffsetBits offset_size,Riscv64Assembler::Branch::Type short_type,Riscv64Assembler::Branch::Type long_type,Riscv64Assembler::Branch::Type longest_type)6592 void Riscv64Assembler::Branch::InitShortOrLong(Riscv64Assembler::Branch::OffsetBits offset_size,
6593                                                Riscv64Assembler::Branch::Type short_type,
6594                                                Riscv64Assembler::Branch::Type long_type,
6595                                                Riscv64Assembler::Branch::Type longest_type) {
6596   Riscv64Assembler::Branch::Type type = short_type;
6597   if (offset_size > branch_info_[type].offset_size) {
6598     type = long_type;
6599     if (offset_size > branch_info_[type].offset_size) {
6600       type = longest_type;
6601     }
6602   }
6603   type_ = type;
6604 }
6605 
InitShortOrLong(Riscv64Assembler::Branch::OffsetBits offset_size,Riscv64Assembler::Branch::Type compressed_type,Riscv64Assembler::Branch::Type short_type,Riscv64Assembler::Branch::Type long_type,Riscv64Assembler::Branch::Type longest_type)6606 void Riscv64Assembler::Branch::InitShortOrLong(Riscv64Assembler::Branch::OffsetBits offset_size,
6607                                                Riscv64Assembler::Branch::Type compressed_type,
6608                                                Riscv64Assembler::Branch::Type short_type,
6609                                                Riscv64Assembler::Branch::Type long_type,
6610                                                Riscv64Assembler::Branch::Type longest_type) {
6611   Riscv64Assembler::Branch::Type type = compressed_type;
6612   if (offset_size > branch_info_[type].offset_size) {
6613     type = short_type;
6614     if (offset_size > branch_info_[type].offset_size) {
6615       type = long_type;
6616       if (offset_size > branch_info_[type].offset_size) {
6617         type = longest_type;
6618       }
6619     }
6620   }
6621   type_ = type;
6622 }
6623 
// Choose the concrete branch type for this branch based on the requested
// `initial_type` and the offset width needed for the (possibly unresolved)
// target. Conditional variants that have degenerated to unconditional
// (condition_ == kUncond) fall through to the unconditional cases. Bare types
// are never promoted; their needed offset must already fit.
void Riscv64Assembler::Branch::InitializeType(Type initial_type) {
  OffsetBits offset_size_needed = GetOffsetSizeNeeded(location_, target_);

  switch (initial_type) {
    case kCondCBranch:
      CHECK(IsCompressableCondition());
      if (condition_ != kUncond) {
        InitShortOrLong(
            offset_size_needed, kCondCBranch, kCondBranch, kCondCBranch21, kLongCondBranch);
        break;
      }
      FALLTHROUGH_INTENDED;
    case kUncondCBranch:
      InitShortOrLong(offset_size_needed, kUncondCBranch, kUncondBranch, kLongUncondBranch);
      break;
    case kBareCondCBranch:
      if (condition_ != kUncond) {
        type_ = kBareCondCBranch;
        // Bare branches cannot be promoted; the offset must already fit.
        CHECK_LE(offset_size_needed, GetOffsetSize());
        break;
      }
      FALLTHROUGH_INTENDED;
    case kBareUncondCBranch:
      type_ = kBareUncondCBranch;
      CHECK_LE(offset_size_needed, GetOffsetSize());
      break;
    case kCondBranch:
      if (condition_ != kUncond) {
        InitShortOrLong(offset_size_needed, kCondBranch, kCondBranch21, kLongCondBranch);
        break;
      }
      FALLTHROUGH_INTENDED;
    case kUncondBranch:
      InitShortOrLong(offset_size_needed, kUncondBranch, kLongUncondBranch, kLongUncondBranch);
      break;
    case kCall:
      InitShortOrLong(offset_size_needed, kCall, kLongCall, kLongCall);
      break;
    case kBareCondBranch:
      if (condition_ != kUncond) {
        type_ = kBareCondBranch;
        CHECK_LE(offset_size_needed, GetOffsetSize());
        break;
      }
      FALLTHROUGH_INTENDED;
    case kBareUncondBranch:
      type_ = kBareUncondBranch;
      CHECK_LE(offset_size_needed, GetOffsetSize());
      break;
    case kBareCall:
      type_ = kBareCall;
      CHECK_LE(offset_size_needed, GetOffsetSize());
      break;
    case kLabel:
      type_ = initial_type;
      break;
    case kLiteral:
    case kLiteralUnsigned:
    case kLiteralLong:
    case kLiteralFloat:
    case kLiteralDouble:
      // Literal loads are created before the literal area exists, so they
      // must still be unresolved here.
      CHECK(!IsResolved());
      type_ = initial_type;
      break;
    default:
      LOG(FATAL) << "Unexpected branch type " << enum_cast<uint32_t>(initial_type);
      UNREACHABLE();
  }

  // Remember the type chosen at creation time (used for promotion accounting).
  old_type_ = type_;
}
6695 
// Returns true if a conditional branch with these operands can never be taken
// (e.g. `lhs < lhs` is always false), making it a no-op.
bool Riscv64Assembler::Branch::IsNop(BranchCondition condition, XRegister lhs, XRegister rhs) {
  switch (condition) {
    case kCondNE:
    case kCondLT:
    case kCondGT:
    case kCondLTU:
    case kCondGTU:
      // Strict/inequality conditions are always false for equal operands.
      return lhs == rhs;
    default:
      return false;
  }
}

// Returns true if a conditional branch with these operands is always taken
// (e.g. `lhs == lhs`), i.e. it is effectively unconditional.
bool Riscv64Assembler::Branch::IsUncond(BranchCondition condition, XRegister lhs, XRegister rhs) {
  switch (condition) {
    case kUncond:
      return true;
    case kCondEQ:
    case kCondGE:
    case kCondLE:
    case kCondLEU:
    case kCondGEU:
      // Reflexive conditions are always true for equal operands.
      return lhs == rhs;
    default:
      return false;
  }
}
6723 
// Returns true for branch types whose first emitted instruction is a 16-bit
// compressed instruction.
bool Riscv64Assembler::Branch::IsCompressed(Type type) {
  switch (type) {
    case kCondCBranch:
    case kUncondCBranch:
    case kBareCondCBranch:
    case kBareUncondCBranch:
    case kCondCBranch21:
      return true;
    default:
      return false;
  }
}
6736 
// Constructor for unconditional branches and calls: a call if `rd` links
// (rd != Zero), otherwise a plain jump; bare and/or compressed as requested.
Riscv64Assembler::Branch::Branch(
    uint32_t location, uint32_t target, XRegister rd, bool is_bare, bool compression_allowed)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(rd),
      rhs_reg_(Zero),
      freg_(kNoFRegister),
      condition_(kUncond),
      next_branch_id_(0u),
      compression_allowed_(compression_allowed) {
  InitializeType((rd != Zero ?
                      (is_bare ? kBareCall : kCall) :
                      (is_bare ? (compression_allowed ? kBareUncondCBranch : kBareUncondBranch) :
                                 (compression_allowed ? kUncondCBranch : kUncondBranch))));
}
6753 
// Constructor for conditional branches. Always-taken and never-taken
// comparisons must be filtered out by the caller (see `IsNop()`/`IsUncond()`).
// Compression is only kept if the condition/registers permit a compressed
// encoding.
Riscv64Assembler::Branch::Branch(uint32_t location,
                                 uint32_t target,
                                 Riscv64Assembler::BranchCondition condition,
                                 XRegister lhs_reg,
                                 XRegister rhs_reg,
                                 bool is_bare,
                                 bool compression_allowed)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(lhs_reg),
      rhs_reg_(rhs_reg),
      freg_(kNoFRegister),
      condition_(condition),
      next_branch_id_(0u) {
  DCHECK_NE(condition, kUncond);
  DCHECK(!IsNop(condition, lhs_reg, rhs_reg));
  DCHECK(!IsUncond(condition, lhs_reg, rhs_reg));
  if (!IsCompressableCondition()) {
    // Compressed branches only support BEQZ/BNEZ forms; drop compression.
    compression_allowed = false;
  }
  compression_allowed_ = compression_allowed;
  InitializeType(is_bare ? (compression_allowed ? kBareCondCBranch : kBareCondBranch) :
                           (compression_allowed ? kCondCBranch : kCondBranch));
}
6779 
// Constructor for label-address materialization and core-register literal
// loads; `rd` receives the result, so it must not be Zero.
Riscv64Assembler::Branch::Branch(uint32_t location,
                                 uint32_t target,
                                 XRegister rd,
                                 Type label_or_literal_type)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(rd),
      rhs_reg_(Zero),
      freg_(kNoFRegister),
      condition_(kUncond),
      next_branch_id_(0u),
      compression_allowed_(false) {
  CHECK_NE(rd, Zero);
  InitializeType(label_or_literal_type);
}
6796 
// Constructor for FP-register literal loads.
Riscv64Assembler::Branch::Branch(uint32_t location,
                                 uint32_t target,
                                 FRegister rd,
                                 Type literal_type)
    : old_location_(location),
      location_(location),
      target_(target),
      lhs_reg_(Zero),
      rhs_reg_(Zero),
      freg_(rd),
      condition_(kUncond),
      next_branch_id_(0u),
      compression_allowed_(false) {
  InitializeType(literal_type);
}
6812 
// Returns the logical negation of `cond` (used when inverting a branch to
// jump around a longer sequence). `kUncond` has no opposite and is fatal.
Riscv64Assembler::BranchCondition Riscv64Assembler::Branch::OppositeCondition(
    Riscv64Assembler::BranchCondition cond) {
  switch (cond) {
    case kCondEQ:
      return kCondNE;
    case kCondNE:
      return kCondEQ;
    case kCondLT:
      return kCondGE;
    case kCondGE:
      return kCondLT;
    case kCondLE:
      return kCondGT;
    case kCondGT:
      return kCondLE;
    case kCondLTU:
      return kCondGEU;
    case kCondGEU:
      return kCondLTU;
    case kCondLEU:
      return kCondGTU;
    case kCondGTU:
      return kCondLEU;
    case kUncond:
      LOG(FATAL) << "Unexpected branch condition " << enum_cast<uint32_t>(cond);
      UNREACHABLE();
  }
}
6841 
// Simple accessors for Branch state.

// Current (possibly promoted) branch type.
Riscv64Assembler::Branch::Type Riscv64Assembler::Branch::GetType() const { return type_; }

// Branch type as chosen at creation time.
Riscv64Assembler::Branch::Type Riscv64Assembler::Branch::GetOldType() const { return old_type_; }

Riscv64Assembler::BranchCondition Riscv64Assembler::Branch::GetCondition() const {
  return condition_;
}

XRegister Riscv64Assembler::Branch::GetLeftRegister() const { return lhs_reg_; }

XRegister Riscv64Assembler::Branch::GetRightRegister() const { return rhs_reg_; }

// For compare-against-zero branches: exactly one operand is Zero; return the
// other one.
XRegister Riscv64Assembler::Branch::GetNonZeroRegister() const {
  DCHECK(GetLeftRegister() == Zero || GetRightRegister() == Zero)
      << "Either register has to be Zero register";
  DCHECK(GetLeftRegister() != Zero || GetRightRegister() != Zero)
      << "Either register has to be non-Zero register";
  return GetLeftRegister() == Zero ? GetRightRegister() : GetLeftRegister();
}

FRegister Riscv64Assembler::Branch::GetFRegister() const { return freg_; }

uint32_t Riscv64Assembler::Branch::GetTarget() const { return target_; }

uint32_t Riscv64Assembler::Branch::GetLocation() const { return location_; }

uint32_t Riscv64Assembler::Branch::GetOldLocation() const { return old_location_; }

// Length in bytes of the current encoding.
uint32_t Riscv64Assembler::Branch::GetLength() const { return branch_info_[type_].length; }

// Length in bytes of the original (pre-promotion) encoding.
uint32_t Riscv64Assembler::Branch::GetOldLength() const { return branch_info_[old_type_].length; }

uint32_t Riscv64Assembler::Branch::GetEndLocation() const { return GetLocation() + GetLength(); }

uint32_t Riscv64Assembler::Branch::GetOldEndLocation() const {
  return GetOldLocation() + GetOldLength();
}

// Id of the next branch bound to the same label (0 terminates the chain).
uint32_t Riscv64Assembler::Branch::NextBranchId() const { return next_branch_id_; }
6881 
IsBare() const6882 bool Riscv64Assembler::Branch::IsBare() const {
6883   switch (type_) {
6884     case kBareUncondBranch:
6885     case kBareCondBranch:
6886     case kBareCall:
6887       return true;
6888     default:
6889       return false;
6890   }
6891 }
6892 
// A branch is resolved once its target address is known.
bool Riscv64Assembler::Branch::IsResolved() const { return target_ != kUnresolved; }

// Compressed conditional branches only encode BEQZ/BNEZ against a "short"
// (compressible) register, so the condition must be EQ/NE with one operand
// being Zero and the other a short register.
bool Riscv64Assembler::Branch::IsCompressableCondition() const {
  return (condition_ == kCondEQ || condition_ == kCondNE) &&
         ((lhs_reg_ == Zero && IsShortReg(rhs_reg_)) || (rhs_reg_ == Zero && IsShortReg(lhs_reg_)));
}

// Maximum offset width encodable by the current branch type.
Riscv64Assembler::Branch::OffsetBits Riscv64Assembler::Branch::GetOffsetSize() const {
  return branch_info_[type_].offset_size;
}
6903 
// Returns the smallest offset width able to encode the distance from
// `location` to `target`.
Riscv64Assembler::Branch::OffsetBits Riscv64Assembler::Branch::GetOffsetSizeNeeded(
    uint32_t location, uint32_t target) {
  // For unresolved targets assume the shortest encoding
  // (later it will be made longer if needed).
  if (target == kUnresolved) {
    return kOffset9;
  }
  // Use a 64-bit distance so that the subtraction cannot wrap.
  int64_t distance = static_cast<int64_t>(target) - location;

  if (IsInt<kOffset9>(distance)) {
    return kOffset9;
  } else if (IsInt<kOffset12>(distance)) {
    return kOffset12;
  } else if (IsInt<kOffset13>(distance)) {
    return kOffset13;
  } else if (IsInt<kOffset21>(distance)) {
    return kOffset21;
  } else {
    return kOffset32;
  }
}
6925 
// Bind this branch to a now-known target address.
void Riscv64Assembler::Branch::Resolve(uint32_t target) { target_ = target; }

// Shift this branch's location/target by `delta` if they lie after the point
// `expand_location` where another branch was expanded.
void Riscv64Assembler::Branch::Relocate(uint32_t expand_location, uint32_t delta) {
  // All targets should be resolved before we start promoting branches.
  DCHECK(IsResolved());
  if (location_ > expand_location) {
    location_ += delta;
  }
  if (target_ > expand_location) {
    target_ += delta;
  }
}
6938 
// Promote this branch to a longer encoding if its resolved target no longer
// fits the current one. Returns the growth in bytes (0 if no promotion).
uint32_t Riscv64Assembler::Branch::PromoteIfNeeded() {
  // All targets should be resolved before we start promoting branches.
  DCHECK(IsResolved());
  Type old_type = type_;
  switch (type_) {
    // Compressed branches (can be promoted to longer)
    case kUncondCBranch: {
      OffsetBits needed_size = GetOffsetSizeNeeded(GetOffsetLocation(), target_);
      if (needed_size <= GetOffsetSize()) {
        return 0u;
      }

      // Promote to the regular 4-byte jump if it fits, else to the long form.
      type_ = needed_size <= branch_info_[kUncondBranch].offset_size ? kUncondBranch :
                                                                       kLongUncondBranch;
      break;
    }
    case kCondCBranch: {
      DCHECK(IsCompressableCondition());
      OffsetBits needed_size = GetOffsetSizeNeeded(GetOffsetLocation(), target_);
      if (needed_size <= GetOffsetSize()) {
        return 0u;
      }

      if (needed_size <= branch_info_[kCondBranch].offset_size) {
        type_ = kCondBranch;
        break;
      }
      // Doesn't fit a 13-bit offset either; share the medium/long logic below.
      FALLTHROUGH_INTENDED;
    }
    // Short branches (can be promoted to longer).
    case kCondBranch: {
      OffsetBits needed_size = GetOffsetSizeNeeded(GetOffsetLocation(), target_);
      if (needed_size <= GetOffsetSize()) {
        return 0u;
      }

      Type cond21Type = old_type == kCondCBranch ? kCondCBranch21 : kCondBranch21;
      if (compression_allowed_ && cond21Type == kCondBranch21 && IsCompressableCondition()) {
        // If this branch was promoted from compressed one on initialization stage
        // it could be promoted back to compressed if possible
        cond21Type = kCondCBranch21;
      }

      // The offset remains the same for `kCond[C]Branch21` for forward branches.
      DCHECK_EQ(branch_info_[cond21Type].length - branch_info_[cond21Type].pc_offset,
                branch_info_[kCondBranch].length - branch_info_[kCondBranch].pc_offset);
      if (target_ <= location_) {
        // Calculate the needed size for kCond[C]Branch21.
        needed_size = GetOffsetSizeNeeded(location_ + branch_info_[cond21Type].pc_offset, target_);
      }
      type_ = (needed_size <= branch_info_[cond21Type].offset_size) ? cond21Type : kLongCondBranch;
      break;
    }
    case kUncondBranch:
      if (GetOffsetSizeNeeded(GetOffsetLocation(), target_) <= GetOffsetSize()) {
        return 0u;
      }
      type_ = kLongUncondBranch;
      break;
    case kCall:
      if (GetOffsetSizeNeeded(GetOffsetLocation(), target_) <= GetOffsetSize()) {
        return 0u;
      }
      type_ = kLongCall;
      break;
    // Medium branch (can be promoted to long).
    case kCondCBranch21:
      DCHECK(IsCompressableCondition());
      FALLTHROUGH_INTENDED;
    case kCondBranch21: {
      OffsetBits needed_size = GetOffsetSizeNeeded(GetOffsetLocation(), target_);
      if (needed_size <= GetOffsetSize()) {
        return 0u;
      }
      type_ = kLongCondBranch;
      break;
    }
    default:
      // Other branch types cannot be promoted.
      DCHECK_LE(GetOffsetSizeNeeded(GetOffsetLocation(), target_), GetOffsetSize())
          << static_cast<uint32_t>(type_);
      return 0u;
  }
  // Promotion must strictly grow the encoding.
  DCHECK(type_ != old_type);
  DCHECK_GT(branch_info_[type_].length, branch_info_[old_type].length);
  return branch_info_[type_].length - branch_info_[old_type].length;
}
7026 
GetOffsetLocation() const7027 uint32_t Riscv64Assembler::Branch::GetOffsetLocation() const {
7028   return location_ + branch_info_[type_].pc_offset;
7029 }
7030 
GetOffset() const7031 int32_t Riscv64Assembler::Branch::GetOffset() const {
7032   CHECK(IsResolved());
7033   // Calculate the byte distance between instructions and also account for
7034   // different PC-relative origins.
7035   uint32_t offset_location = GetOffsetLocation();
7036   int32_t offset = static_cast<int32_t>(target_ - offset_location);
7037   DCHECK_EQ(offset, static_cast<int64_t>(target_) - static_cast<int64_t>(offset_location));
7038   return offset;
7039 }
7040 
LinkToList(uint32_t next_branch_id)7041 void Riscv64Assembler::Branch::LinkToList(uint32_t next_branch_id) {
7042   next_branch_id_ = next_branch_id;
7043 }
7044 
EmitBcond(BranchCondition cond,XRegister rs,XRegister rt,int32_t offset)7045 void Riscv64Assembler::EmitBcond(BranchCondition cond,
7046                                  XRegister rs,
7047                                  XRegister rt,
7048                                  int32_t offset) {
7049   switch (cond) {
7050 #define DEFINE_CASE(COND, cond) \
7051     case kCond##COND:           \
7052       B##cond(rs, rt, offset);  \
7053       break;
7054     DEFINE_CASE(EQ, eq)
7055     DEFINE_CASE(NE, ne)
7056     DEFINE_CASE(LT, lt)
7057     DEFINE_CASE(GE, ge)
7058     DEFINE_CASE(LE, le)
7059     DEFINE_CASE(GT, gt)
7060     DEFINE_CASE(LTU, ltu)
7061     DEFINE_CASE(GEU, geu)
7062     DEFINE_CASE(LEU, leu)
7063     DEFINE_CASE(GTU, gtu)
7064 #undef DEFINE_CASE
7065     case kUncond:
7066       LOG(FATAL) << "Unexpected branch condition " << enum_cast<uint32_t>(cond);
7067       UNREACHABLE();
7068   }
7069 }
7070 
EmitBranch(Riscv64Assembler::Branch * branch)7071 void Riscv64Assembler::EmitBranch(Riscv64Assembler::Branch* branch) {
7072   CHECK(overwriting_);
7073   overwrite_location_ = branch->GetLocation();
7074   const int32_t offset = branch->GetOffset();
7075   BranchCondition condition = branch->GetCondition();
7076   XRegister lhs = branch->GetLeftRegister();
7077   XRegister rhs = branch->GetRightRegister();
7078   // Disable Compressed emitter explicitly and enable where it is needed
7079   ScopedNoCInstructions no_compression(this);
7080 
7081   auto emit_auipc_and_next = [&](XRegister reg, auto next) {
7082     CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
7083     auto [imm20, short_offset] = SplitOffset(offset);
7084     Auipc(reg, imm20);
7085     next(short_offset);
7086   };
7087 
7088   switch (branch->GetType()) {
7089     // Compressed branches
7090     case Branch::kCondCBranch:
7091     case Branch::kBareCondCBranch: {
7092       ScopedUseCInstructions use_compression(this);
7093       CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
7094       DCHECK(branch->IsCompressableCondition());
7095       if (condition == kCondEQ) {
7096         CBeqz(branch->GetNonZeroRegister(), offset);
7097       } else {
7098         CBnez(branch->GetNonZeroRegister(), offset);
7099       }
7100       break;
7101     }
7102     case Branch::kUncondCBranch:
7103     case Branch::kBareUncondCBranch: {
7104       ScopedUseCInstructions use_compression(this);
7105       CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
7106       CJ(offset);
7107       break;
7108     }
7109     // Short branches.
7110     case Branch::kUncondBranch:
7111     case Branch::kBareUncondBranch:
7112       CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
7113       J(offset);
7114       break;
7115     case Branch::kCondBranch:
7116     case Branch::kBareCondBranch:
7117       CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
7118       EmitBcond(condition, lhs, rhs, offset);
7119       break;
7120     case Branch::kCall:
7121     case Branch::kBareCall:
7122       CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
7123       DCHECK(lhs != Zero);
7124       Jal(lhs, offset);
7125       break;
7126 
7127     // Medium branch.
7128     case Branch::kCondBranch21:
7129       EmitBcond(Branch::OppositeCondition(condition), lhs, rhs, branch->GetLength());
7130       CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
7131       J(offset);
7132       break;
7133     case Branch::kCondCBranch21: {
7134       DCHECK(branch->IsCompressableCondition());
7135       {
7136         ScopedUseCInstructions use_compression(this);
7137         if (condition == kCondNE) {
7138           DCHECK_EQ(Branch::OppositeCondition(condition), kCondEQ);
7139           CBeqz(branch->GetNonZeroRegister(), branch->GetLength());
7140         } else {
7141           DCHECK_EQ(Branch::OppositeCondition(condition), kCondNE);
7142           CBnez(branch->GetNonZeroRegister(), branch->GetLength());
7143         }
7144       }
7145       CHECK_EQ(overwrite_location_, branch->GetOffsetLocation());
7146       J(offset);
7147       break;
7148     }
7149     // Long branches.
7150     case Branch::kLongCondBranch:
7151       EmitBcond(Branch::OppositeCondition(condition), lhs, rhs, branch->GetLength());
7152       FALLTHROUGH_INTENDED;
7153     case Branch::kLongUncondBranch:
7154       emit_auipc_and_next(TMP, [&](int32_t short_offset) { Jalr(Zero, TMP, short_offset); });
7155       break;
7156     case Branch::kLongCall:
7157       DCHECK(lhs != Zero);
7158       emit_auipc_and_next(lhs, [&](int32_t short_offset) { Jalr(lhs, lhs, short_offset); });
7159       break;
7160 
7161     // label.
7162     case Branch::kLabel:
7163       emit_auipc_and_next(lhs, [&](int32_t short_offset) { Addi(lhs, lhs, short_offset); });
7164       break;
7165     // literals.
7166     case Branch::kLiteral:
7167       emit_auipc_and_next(lhs, [&](int32_t short_offset) { Lw(lhs, lhs, short_offset); });
7168       break;
7169     case Branch::kLiteralUnsigned:
7170       emit_auipc_and_next(lhs, [&](int32_t short_offset) { Lwu(lhs, lhs, short_offset); });
7171       break;
7172     case Branch::kLiteralLong:
7173       emit_auipc_and_next(lhs, [&](int32_t short_offset) { Ld(lhs, lhs, short_offset); });
7174       break;
7175     case Branch::kLiteralFloat:
7176       emit_auipc_and_next(
7177           TMP, [&](int32_t short_offset) { FLw(branch->GetFRegister(), TMP, short_offset); });
7178       break;
7179     case Branch::kLiteralDouble:
7180       emit_auipc_and_next(
7181           TMP, [&](int32_t short_offset) { FLd(branch->GetFRegister(), TMP, short_offset); });
7182       break;
7183   }
7184   CHECK_EQ(overwrite_location_, branch->GetEndLocation());
7185   CHECK_LE(branch->GetLength(), static_cast<uint32_t>(Branch::kMaxBranchLength));
7186 }
7187 
EmitBranches()7188 void Riscv64Assembler::EmitBranches() {
7189   CHECK(!overwriting_);
7190   // Switch from appending instructions at the end of the buffer to overwriting
7191   // existing instructions (branch placeholders) in the buffer.
7192   overwriting_ = true;
7193   for (auto& branch : branches_) {
7194     EmitBranch(&branch);
7195   }
7196   overwriting_ = false;
7197 }
7198 
FinalizeLabeledBranch(Riscv64Label * label)7199 void Riscv64Assembler::FinalizeLabeledBranch(Riscv64Label* label) {
7200   const uint32_t alignment =
7201       IsExtensionEnabled(Riscv64Extension::kZca) ? sizeof(uint16_t) : sizeof(uint32_t);
7202   Branch& this_branch = branches_.back();
7203   uint32_t branch_length = this_branch.GetLength();
7204   DCHECK(IsAlignedParam(branch_length, alignment));
7205   uint32_t length = branch_length / alignment;
7206   if (!label->IsBound()) {
7207     // Branch forward (to a following label), distance is unknown.
7208     // The first branch forward will contain 0, serving as the terminator of
7209     // the list of forward-reaching branches.
7210     this_branch.LinkToList(label->position_);
7211     // Now make the label object point to this branch
7212     // (this forms a linked list of branches preceding this label).
7213     uint32_t branch_id = branches_.size() - 1;
7214     label->LinkTo(branch_id);
7215   }
7216   // Reserve space for the branch.
7217   for (; length != 0u; --length) {
7218     if (alignment == sizeof(uint16_t)) {
7219       Emit16(0);
7220     } else {
7221       Emit32(0);
7222     }
7223   }
7224 }
7225 
Bcond(Riscv64Label * label,bool is_bare,BranchCondition condition,XRegister lhs,XRegister rhs)7226 void Riscv64Assembler::Bcond(
7227     Riscv64Label* label, bool is_bare, BranchCondition condition, XRegister lhs, XRegister rhs) {
7228   // TODO(riscv64): Should an assembler perform these optimizations, or should we remove them?
7229   // If lhs = rhs, this can be a NOP.
7230   if (Branch::IsNop(condition, lhs, rhs)) {
7231     return;
7232   }
7233   if (Branch::IsUncond(condition, lhs, rhs)) {
7234     Buncond(label, Zero, is_bare);
7235     return;
7236   }
7237 
7238   uint32_t target = label->IsBound() ? GetLabelLocation(label) : Branch::kUnresolved;
7239   branches_.emplace_back(buffer_.Size(),
7240                          target,
7241                          condition,
7242                          lhs,
7243                          rhs,
7244                          is_bare,
7245                          IsExtensionEnabled(Riscv64Extension::kZca));
7246   FinalizeLabeledBranch(label);
7247 }
7248 
Buncond(Riscv64Label * label,XRegister rd,bool is_bare)7249 void Riscv64Assembler::Buncond(Riscv64Label* label, XRegister rd, bool is_bare) {
7250   uint32_t target = label->IsBound() ? GetLabelLocation(label) : Branch::kUnresolved;
7251   branches_.emplace_back(
7252       buffer_.Size(), target, rd, is_bare, IsExtensionEnabled(Riscv64Extension::kZca));
7253   FinalizeLabeledBranch(label);
7254 }
7255 
7256 template <typename XRegisterOrFRegister>
LoadLiteral(Literal * literal,XRegisterOrFRegister rd,Branch::Type literal_type)7257 void Riscv64Assembler::LoadLiteral(Literal* literal,
7258                                    XRegisterOrFRegister rd,
7259                                    Branch::Type literal_type) {
7260   Riscv64Label* label = literal->GetLabel();
7261   DCHECK(!label->IsBound());
7262   branches_.emplace_back(buffer_.Size(), Branch::kUnresolved, rd, literal_type);
7263   FinalizeLabeledBranch(label);
7264 }
7265 
GetBranch(uint32_t branch_id)7266 Riscv64Assembler::Branch* Riscv64Assembler::GetBranch(uint32_t branch_id) {
7267   CHECK_LT(branch_id, branches_.size());
7268   return &branches_[branch_id];
7269 }
7270 
GetBranch(uint32_t branch_id) const7271 const Riscv64Assembler::Branch* Riscv64Assembler::GetBranch(uint32_t branch_id) const {
7272   CHECK_LT(branch_id, branches_.size());
7273   return &branches_[branch_id];
7274 }
7275 
Bind(Riscv64Label * label)7276 void Riscv64Assembler::Bind(Riscv64Label* label) {
7277   CHECK(!label->IsBound());
7278   uint32_t bound_pc = buffer_.Size();
7279 
7280   // Walk the list of branches referring to and preceding this label.
7281   // Store the previously unknown target addresses in them.
7282   while (label->IsLinked()) {
7283     uint32_t branch_id = label->Position();
7284     Branch* branch = GetBranch(branch_id);
7285     branch->Resolve(bound_pc);
7286     // On to the next branch in the list...
7287     label->position_ = branch->NextBranchId();
7288   }
7289 
7290   // Now make the label object contain its own location (relative to the end of the preceding
7291   // branch, if any; it will be used by the branches referring to and following this label).
7292   uint32_t prev_branch_id = Riscv64Label::kNoPrevBranchId;
7293   if (!branches_.empty()) {
7294     prev_branch_id = branches_.size() - 1u;
7295     const Branch* prev_branch = GetBranch(prev_branch_id);
7296     bound_pc -= prev_branch->GetEndLocation();
7297   }
7298   label->prev_branch_id_ = prev_branch_id;
7299   label->BindTo(bound_pc);
7300 }
7301 
LoadLabelAddress(XRegister rd,Riscv64Label * label)7302 void Riscv64Assembler::LoadLabelAddress(XRegister rd, Riscv64Label* label) {
7303   DCHECK_NE(rd, Zero);
7304   uint32_t target = label->IsBound() ? GetLabelLocation(label) : Branch::kUnresolved;
7305   branches_.emplace_back(buffer_.Size(), target, rd, Branch::kLabel);
7306   FinalizeLabeledBranch(label);
7307 }
7308 
NewLiteral(size_t size,const uint8_t * data)7309 Literal* Riscv64Assembler::NewLiteral(size_t size, const uint8_t* data) {
7310   // We don't support byte and half-word literals.
7311   if (size == 4u) {
7312     literals_.emplace_back(size, data);
7313     return &literals_.back();
7314   } else {
7315     DCHECK_EQ(size, 8u);
7316     long_literals_.emplace_back(size, data);
7317     return &long_literals_.back();
7318   }
7319 }
7320 
CreateJumpTable(ArenaVector<Riscv64Label * > && labels)7321 JumpTable* Riscv64Assembler::CreateJumpTable(ArenaVector<Riscv64Label*>&& labels) {
7322   jump_tables_.emplace_back(std::move(labels));
7323   JumpTable* table = &jump_tables_.back();
7324   DCHECK(!table->GetLabel()->IsBound());
7325   return table;
7326 }
7327 
GetLabelLocation(const Riscv64Label * label) const7328 uint32_t Riscv64Assembler::GetLabelLocation(const Riscv64Label* label) const {
7329   CHECK(label->IsBound());
7330   uint32_t target = label->Position();
7331   if (label->prev_branch_id_ != Riscv64Label::kNoPrevBranchId) {
7332     // Get label location based on the branch preceding it.
7333     const Branch* prev_branch = GetBranch(label->prev_branch_id_);
7334     target += prev_branch->GetEndLocation();
7335   }
7336   return target;
7337 }
7338 
GetAdjustedPosition(uint32_t old_position)7339 uint32_t Riscv64Assembler::GetAdjustedPosition(uint32_t old_position) {
7340   // We can reconstruct the adjustment by going through all the branches from the beginning
7341   // up to the `old_position`. Since we expect `GetAdjustedPosition()` to be called in a loop
7342   // with increasing `old_position`, we can use the data from last `GetAdjustedPosition()` to
7343   // continue where we left off and the whole loop should be O(m+n) where m is the number
7344   // of positions to adjust and n is the number of branches.
7345   if (old_position < last_old_position_) {
7346     last_position_adjustment_ = 0;
7347     last_old_position_ = 0;
7348     last_branch_id_ = 0;
7349   }
7350   while (last_branch_id_ != branches_.size()) {
7351     const Branch* branch = GetBranch(last_branch_id_);
7352     if (branch->GetLocation() >= old_position + last_position_adjustment_) {
7353       break;
7354     }
7355     last_position_adjustment_ += branch->GetLength() - branch->GetOldLength();
7356     ++last_branch_id_;
7357   }
7358   last_old_position_ = old_position;
7359   return old_position + last_position_adjustment_;
7360 }
7361 
ReserveJumpTableSpace()7362 void Riscv64Assembler::ReserveJumpTableSpace() {
7363   if (!jump_tables_.empty()) {
7364     for (JumpTable& table : jump_tables_) {
7365       Riscv64Label* label = table.GetLabel();
7366       Bind(label);
7367 
7368       // Bulk ensure capacity, as this may be large.
7369       size_t orig_size = buffer_.Size();
7370       size_t required_capacity = orig_size + table.GetSize();
7371       if (required_capacity > buffer_.Capacity()) {
7372         buffer_.ExtendCapacity(required_capacity);
7373       }
7374 #ifndef NDEBUG
7375       buffer_.has_ensured_capacity_ = true;
7376 #endif
7377 
7378       // Fill the space with placeholder data as the data is not final
7379       // until the branches have been promoted. And we shouldn't
7380       // be moving uninitialized data during branch promotion.
7381       for (size_t cnt = table.GetData().size(), i = 0; i < cnt; ++i) {
7382         buffer_.Emit<uint32_t>(0x1abe1234u);
7383       }
7384 
7385 #ifndef NDEBUG
7386       buffer_.has_ensured_capacity_ = false;
7387 #endif
7388     }
7389   }
7390 }
7391 
PromoteBranches()7392 void Riscv64Assembler::PromoteBranches() {
7393   // Promote short branches to long as necessary.
7394   bool changed;
7395   // To avoid re-computing predicate on each iteration cache it in local
7396   do {
7397     changed = false;
7398     for (auto& branch : branches_) {
7399       CHECK(branch.IsResolved());
7400       uint32_t delta = branch.PromoteIfNeeded();
7401       // If this branch has been promoted and needs to expand in size,
7402       // relocate all branches by the expansion size.
7403       if (delta != 0u) {
7404         changed = true;
7405         uint32_t expand_location = branch.GetLocation();
7406         for (auto& branch2 : branches_) {
7407           branch2.Relocate(expand_location, delta);
7408         }
7409       }
7410     }
7411   } while (changed);
7412 
7413   // Account for branch expansion by resizing the code buffer
7414   // and moving the code in it to its final location.
7415   size_t branch_count = branches_.size();
7416   if (branch_count > 0) {
7417     // Resize.
7418     Branch& last_branch = branches_[branch_count - 1];
7419     uint32_t size_delta = last_branch.GetEndLocation() - last_branch.GetOldEndLocation();
7420     uint32_t old_size = buffer_.Size();
7421     buffer_.Resize(old_size + size_delta);
7422     // Move the code residing between branch placeholders.
7423     uint32_t end = old_size;
7424     for (size_t i = branch_count; i > 0;) {
7425       Branch& branch = branches_[--i];
7426       uint32_t size = end - branch.GetOldEndLocation();
7427       buffer_.Move(branch.GetEndLocation(), branch.GetOldEndLocation(), size);
7428       end = branch.GetOldLocation();
7429     }
7430   }
7431 
7432   // Align 64-bit literals by moving them up by 4 bytes if needed.
7433   // This can increase the PC-relative distance but all literals are accessed with AUIPC+Load(imm12)
7434   // without branch promotion, so this late adjustment cannot take them out of instruction range.
7435   if (!long_literals_.empty()) {
7436     uint32_t first_literal_location = GetLabelLocation(long_literals_.front().GetLabel());
7437     size_t lit_size = long_literals_.size() * sizeof(uint64_t);
7438     size_t buf_size = buffer_.Size();
7439     // 64-bit literals must be at the very end of the buffer.
7440     CHECK_EQ(first_literal_location + lit_size, buf_size);
7441     if (!IsAligned<sizeof(uint64_t)>(first_literal_location)) {
7442       // Insert the padding.
7443       buffer_.Resize(buf_size + sizeof(uint32_t));
7444       buffer_.Move(first_literal_location + sizeof(uint32_t), first_literal_location, lit_size);
7445       DCHECK(!overwriting_);
7446       overwriting_ = true;
7447       overwrite_location_ = first_literal_location;
7448       Emit32(0);  // Illegal instruction.
7449       overwriting_ = false;
7450       // Increase target addresses in literal and address loads by 4 bytes in order for correct
7451       // offsets from PC to be generated.
7452       for (auto& branch : branches_) {
7453         uint32_t target = branch.GetTarget();
7454         if (target >= first_literal_location) {
7455           branch.Resolve(target + sizeof(uint32_t));
7456         }
7457       }
7458       // If after this we ever call GetLabelLocation() to get the location of a 64-bit literal,
7459       // we need to adjust the location of the literal's label as well.
7460       for (Literal& literal : long_literals_) {
7461         // Bound label's position is negative, hence decrementing it instead of incrementing.
7462         literal.GetLabel()->position_ -= sizeof(uint32_t);
7463       }
7464     }
7465   }
7466 }
7467 
PatchCFI()7468 void Riscv64Assembler::PatchCFI() {
7469   if (cfi().NumberOfDelayedAdvancePCs() == 0u) {
7470     return;
7471   }
7472 
7473   using DelayedAdvancePC = DebugFrameOpCodeWriterForAssembler::DelayedAdvancePC;
7474   const auto data = cfi().ReleaseStreamAndPrepareForDelayedAdvancePC();
7475   const std::vector<uint8_t>& old_stream = data.first;
7476   const std::vector<DelayedAdvancePC>& advances = data.second;
7477 
7478   // Refill our data buffer with patched opcodes.
7479   static constexpr size_t kExtraSpace = 16;  // Not every PC advance can be encoded in one byte.
7480   cfi().ReserveCFIStream(old_stream.size() + advances.size() + kExtraSpace);
7481   size_t stream_pos = 0;
7482   for (const DelayedAdvancePC& advance : advances) {
7483     DCHECK_GE(advance.stream_pos, stream_pos);
7484     // Copy old data up to the point where advance was issued.
7485     cfi().AppendRawData(old_stream, stream_pos, advance.stream_pos);
7486     stream_pos = advance.stream_pos;
7487     // Insert the advance command with its final offset.
7488     size_t final_pc = GetAdjustedPosition(advance.pc);
7489     cfi().AdvancePC(final_pc);
7490   }
7491   // Copy the final segment if any.
7492   cfi().AppendRawData(old_stream, stream_pos, old_stream.size());
7493 }
7494 
EmitJumpTables()7495 void Riscv64Assembler::EmitJumpTables() {
7496   if (!jump_tables_.empty()) {
7497     CHECK(!overwriting_);
7498     // Switch from appending instructions at the end of the buffer to overwriting
7499     // existing instructions (here, jump tables) in the buffer.
7500     overwriting_ = true;
7501 
7502     for (JumpTable& table : jump_tables_) {
7503       Riscv64Label* table_label = table.GetLabel();
7504       uint32_t start = GetLabelLocation(table_label);
7505       overwrite_location_ = start;
7506 
7507       for (Riscv64Label* target : table.GetData()) {
7508         CHECK_EQ(buffer_.Load<uint32_t>(overwrite_location_), 0x1abe1234u);
7509         // The table will contain target addresses relative to the table start.
7510         uint32_t offset = GetLabelLocation(target) - start;
7511         Emit32(offset);
7512       }
7513     }
7514 
7515     overwriting_ = false;
7516   }
7517 }
7518 
EmitLiterals()7519 void Riscv64Assembler::EmitLiterals() {
7520   if (!literals_.empty()) {
7521     for (Literal& literal : literals_) {
7522       Riscv64Label* label = literal.GetLabel();
7523       Bind(label);
7524       AssemblerBuffer::EnsureCapacity ensured(&buffer_);
7525       DCHECK_EQ(literal.GetSize(), 4u);
7526       for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
7527         buffer_.Emit<uint8_t>(literal.GetData()[i]);
7528       }
7529     }
7530   }
7531   if (!long_literals_.empty()) {
7532     // These need to be 8-byte-aligned but we shall add the alignment padding after the branch
7533     // promotion, if needed. Since all literals are accessed with AUIPC+Load(imm12) without branch
7534     // promotion, this late adjustment cannot take long literals out of instruction range.
7535     for (Literal& literal : long_literals_) {
7536       Riscv64Label* label = literal.GetLabel();
7537       Bind(label);
7538       AssemblerBuffer::EnsureCapacity ensured(&buffer_);
7539       DCHECK_EQ(literal.GetSize(), 8u);
7540       for (size_t i = 0, size = literal.GetSize(); i != size; ++i) {
7541         buffer_.Emit<uint8_t>(literal.GetData()[i]);
7542       }
7543     }
7544   }
7545 }
7546 
7547 // This method is used to adjust the base register and offset pair for
7548 // a load/store when the offset doesn't fit into 12-bit signed integer.
AdjustBaseAndOffset(XRegister & base,int32_t & offset,ScratchRegisterScope & srs)7549 void Riscv64Assembler::AdjustBaseAndOffset(XRegister& base,
7550                                            int32_t& offset,
7551                                            ScratchRegisterScope& srs) {
7552   // A scratch register must be available for adjustment even if it's not needed.
7553   CHECK_NE(srs.AvailableXRegisters(), 0u);
7554   if (IsInt<12>(offset)) {
7555     return;
7556   }
7557 
7558   constexpr int32_t kPositiveOffsetMaxSimpleAdjustment = 0x7ff;
7559   constexpr int32_t kHighestOffsetForSimpleAdjustment = 2 * kPositiveOffsetMaxSimpleAdjustment;
7560   constexpr int32_t kPositiveOffsetSimpleAdjustmentAligned8 =
7561       RoundDown(kPositiveOffsetMaxSimpleAdjustment, 8);
7562   constexpr int32_t kPositiveOffsetSimpleAdjustmentAligned4 =
7563       RoundDown(kPositiveOffsetMaxSimpleAdjustment, 4);
7564   constexpr int32_t kNegativeOffsetSimpleAdjustment = -0x800;
7565   constexpr int32_t kLowestOffsetForSimpleAdjustment = 2 * kNegativeOffsetSimpleAdjustment;
7566 
7567   XRegister tmp = srs.AllocateXRegister();
7568   if (offset >= 0 && offset <= kHighestOffsetForSimpleAdjustment) {
7569     // Make the adjustment 8-byte aligned (0x7f8) except for offsets that cannot be reached
7570     // with this adjustment, then try 4-byte alignment, then just half of the offset.
7571     int32_t adjustment = IsInt<12>(offset - kPositiveOffsetSimpleAdjustmentAligned8)
7572         ? kPositiveOffsetSimpleAdjustmentAligned8
7573         : IsInt<12>(offset - kPositiveOffsetSimpleAdjustmentAligned4)
7574             ? kPositiveOffsetSimpleAdjustmentAligned4
7575             : offset / 2;
7576     DCHECK(IsInt<12>(adjustment));
7577     Addi(tmp, base, adjustment);
7578     offset -= adjustment;
7579   } else if (offset < 0 && offset >= kLowestOffsetForSimpleAdjustment) {
7580     Addi(tmp, base, kNegativeOffsetSimpleAdjustment);
7581     offset -= kNegativeOffsetSimpleAdjustment;
7582   } else if (offset >= 0x7ffff800) {
7583     // Support even large offsets outside the range supported by `SplitOffset()`.
7584     LoadConst32(tmp, offset);
7585     Add(tmp, tmp, base);
7586     offset = 0;
7587   } else {
7588     auto [imm20, short_offset] = SplitOffset(offset);
7589     Lui(tmp, imm20);
7590     Add(tmp, tmp, base);
7591     offset = short_offset;
7592   }
7593   base = tmp;
7594 }
7595 
7596 template <void (Riscv64Assembler::*insn)(XRegister, XRegister, int32_t)>
LoadFromOffset(XRegister rd,XRegister rs1,int32_t offset)7597 void Riscv64Assembler::LoadFromOffset(XRegister rd, XRegister rs1, int32_t offset) {
7598   CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
7599   CHECK_EQ((1u << rd) & available_scratch_core_registers_, 0u);
7600   ScratchRegisterScope srs(this);
7601   // If `rd` differs from `rs1`, allow using it as a temporary if needed.
7602   if (rd != rs1) {
7603     srs.IncludeXRegister(rd);
7604   }
7605   AdjustBaseAndOffset(rs1, offset, srs);
7606   (this->*insn)(rd, rs1, offset);
7607 }
7608 
7609 template <void (Riscv64Assembler::*insn)(XRegister, XRegister, int32_t)>
StoreToOffset(XRegister rs2,XRegister rs1,int32_t offset)7610 void Riscv64Assembler::StoreToOffset(XRegister rs2, XRegister rs1, int32_t offset) {
7611   CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
7612   CHECK_EQ((1u << rs2) & available_scratch_core_registers_, 0u);
7613   ScratchRegisterScope srs(this);
7614   AdjustBaseAndOffset(rs1, offset, srs);
7615   (this->*insn)(rs2, rs1, offset);
7616 }
7617 
7618 template <void (Riscv64Assembler::*insn)(FRegister, XRegister, int32_t)>
FLoadFromOffset(FRegister rd,XRegister rs1,int32_t offset)7619 void Riscv64Assembler::FLoadFromOffset(FRegister rd, XRegister rs1, int32_t offset) {
7620   CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
7621   ScratchRegisterScope srs(this);
7622   AdjustBaseAndOffset(rs1, offset, srs);
7623   (this->*insn)(rd, rs1, offset);
7624 }
7625 
7626 template <void (Riscv64Assembler::*insn)(FRegister, XRegister, int32_t)>
FStoreToOffset(FRegister rs2,XRegister rs1,int32_t offset)7627 void Riscv64Assembler::FStoreToOffset(FRegister rs2, XRegister rs1, int32_t offset) {
7628   CHECK_EQ((1u << rs1) & available_scratch_core_registers_, 0u);
7629   ScratchRegisterScope srs(this);
7630   AdjustBaseAndOffset(rs1, offset, srs);
7631   (this->*insn)(rs2, rs1, offset);
7632 }
7633 
LoadImmediate(XRegister rd,int64_t imm,bool can_use_tmp)7634 void Riscv64Assembler::LoadImmediate(XRegister rd, int64_t imm, bool can_use_tmp) {
7635   CHECK_EQ((1u << rd) & available_scratch_core_registers_, 0u);
7636   ScratchRegisterScope srs(this);
7637   CHECK_IMPLIES(can_use_tmp, srs.AvailableXRegisters() != 0u);
7638 
7639   // Helper lambdas.
7640   auto addi = [&](XRegister rd, XRegister rs, int32_t imm) { Addi(rd, rs, imm); };
7641   auto addiw = [&](XRegister rd, XRegister rs, int32_t imm) { Addiw(rd, rs, imm); };
7642   auto slli = [&](XRegister rd, XRegister rs, int32_t imm) { Slli(rd, rs, imm); };
7643   auto lui = [&](XRegister rd, uint32_t imm20) { Lui(rd, imm20); };
7644 
7645   // Simple LUI+ADDI/W can handle value range [-0x80000800, 0x7fffffff].
7646   auto is_simple_li_value = [](int64_t value) {
7647     return value >= INT64_C(-0x80000800) && value <= INT64_C(0x7fffffff);
7648   };
7649   auto emit_simple_li_helper = [&](XRegister rd,
7650                                    int64_t value,
7651                                    auto&& addi,
7652                                    auto&& addiw,
7653                                    auto&& slli,
7654                                    auto&& lui) {
7655     DCHECK(is_simple_li_value(value)) << "0x" << std::hex << value;
7656     if (IsInt<12>(value)) {
7657       addi(rd, Zero, value);
7658     } else if (CTZ(value) < 12 && IsInt(6 + CTZ(value), value)) {
7659       // This path yields two 16-bit instructions with the "C" Standard Extension.
7660       addi(rd, Zero, value >> CTZ(value));
7661       slli(rd, rd, CTZ(value));
7662     } else if (value < INT64_C(-0x80000000)) {
7663       int32_t small_value = dchecked_integral_cast<int32_t>(value - INT64_C(-0x80000000));
7664       DCHECK(IsInt<12>(small_value));
7665       DCHECK_LT(small_value, 0);
7666       lui(rd, 1u << 19);
7667       addi(rd, rd, small_value);
7668     } else {
7669       DCHECK(IsInt<32>(value));
7670       // Note: Similar to `SplitOffset()` but we can target the full 32-bit range with ADDIW.
7671       int64_t near_value = (value + 0x800) & ~0xfff;
7672       int32_t small_value = value - near_value;
7673       DCHECK(IsInt<12>(small_value));
7674       uint32_t imm20 = static_cast<uint32_t>(near_value) >> 12;
7675       DCHECK_NE(imm20, 0u);  // Small values are handled above.
7676       lui(rd, imm20);
7677       if (small_value != 0) {
7678         addiw(rd, rd, small_value);
7679       }
7680     }
7681   };
7682   auto emit_simple_li = [&](XRegister rd, int64_t value) {
7683     emit_simple_li_helper(rd, value, addi, addiw, slli, lui);
7684   };
7685   auto count_simple_li_instructions = [&](int64_t value) {
7686     size_t num_instructions = 0u;
7687     auto count_rri = [&](XRegister, XRegister, int32_t) { ++num_instructions; };
7688     auto count_ru = [&](XRegister, uint32_t) { ++num_instructions; };
7689     emit_simple_li_helper(Zero, value, count_rri, count_rri, count_rri, count_ru);
7690     return num_instructions;
7691   };
7692 
  // If LUI+ADDI/W is not enough, we can generate up to 3 SLLI+ADDI afterwards (up to 8 instructions
  // total). The ADDI from the first SLLI+ADDI pair can be a no-op.
  auto emit_with_slli_addi_helper = [&](XRegister rd,
                                        int64_t value,
                                        auto&& addi,
                                        auto&& addiw,
                                        auto&& slli,
                                        auto&& lui) {
    static constexpr size_t kMaxNumSllAddi = 3u;
    int32_t addi_values[kMaxNumSllAddi];
    size_t sll_shamts[kMaxNumSllAddi];
    size_t num_sll_addi = 0u;
    // Decompose `value`: repeatedly peel off the sign-extended low 12 bits (a trailing ADDI)
    // and the trailing zeros of the remainder (a trailing SLLI) until what is left can be
    // materialized by `emit_simple_li_helper`. The pairs are recorded and replayed in reverse.
    while (!is_simple_li_value(value)) {
      DCHECK_LT(num_sll_addi, kMaxNumSllAddi);
      // Prepare sign-extended low 12 bits for ADDI.
      int64_t addi_value = (value & 0xfff) - ((value & 0x800) << 1);
      DCHECK(IsInt<12>(addi_value));
      int64_t remaining = value - addi_value;
      // `remaining` has its low 12 bits clear (they matched `addi_value`), so `shamt >= 12`.
      size_t shamt = CTZ(remaining);
      DCHECK_GE(shamt, 12u);
      addi_values[num_sll_addi] = addi_value;
      sll_shamts[num_sll_addi] = shamt;
      value = remaining >> shamt;
      ++num_sll_addi;
    }
    if (num_sll_addi != 0u && IsInt<20>(value) && !IsInt<12>(value)) {
      // If `sll_shamts[num_sll_addi - 1u]` was only 12, we would have stopped
      // the decomposition a step earlier with smaller `num_sll_addi`.
      DCHECK_GT(sll_shamts[num_sll_addi - 1u], 12u);
      // Emit the signed 20-bit value with LUI and reduce the SLLI shamt by 12 to compensate.
      sll_shamts[num_sll_addi - 1u] -= 12u;
      lui(rd, dchecked_integral_cast<uint32_t>(value & 0xfffff));
    } else {
      emit_simple_li_helper(rd, value, addi, addiw, slli, lui);
    }
    // Replay the recorded SLLI(+ADDI) pairs from most significant to least significant.
    for (size_t i = num_sll_addi; i != 0u; ) {
      --i;
      slli(rd, rd, sll_shamts[i]);
      if (addi_values[i] != 0) {
        addi(rd, rd, addi_values[i]);
      }
    }
  };
  // Emit the general SLLI+ADDI decomposition using the real instruction emitters.
  auto emit_with_slli_addi = [&](XRegister rd, int64_t value) {
    emit_with_slli_addi_helper(rd, value, addi, addiw, slli, lui);
  };
  // Count how many instructions `emit_with_slli_addi` would emit for `value`,
  // using counting callbacks instead of instruction emitters (nothing is emitted).
  auto count_instructions_with_slli_addi = [&](int64_t value) {
    size_t num_instructions = 0u;
    auto count_rri = [&](XRegister, XRegister, int32_t) { ++num_instructions; };
    auto count_ru = [&](XRegister, uint32_t) { ++num_instructions; };
    emit_with_slli_addi_helper(Zero, value, count_rri, count_rri, count_rri, count_ru);
    return num_instructions;
  };
7746 
7747   size_t insns_needed = count_instructions_with_slli_addi(imm);
7748   size_t trailing_slli_shamt = 0u;
7749   if (insns_needed > 2u) {
7750     // Sometimes it's better to end with a SLLI even when the above code would end with ADDI.
7751     if ((imm & 1) == 0 && (imm & 0xfff) != 0) {
7752       int64_t value = imm >> CTZ(imm);
7753       size_t new_insns_needed = count_instructions_with_slli_addi(value) + /*SLLI*/ 1u;
7754       DCHECK_GT(new_insns_needed, 2u);
7755       if (insns_needed > new_insns_needed) {
7756         insns_needed = new_insns_needed;
7757         trailing_slli_shamt = CTZ(imm);
7758       }
7759     }
7760 
7761     // Sometimes we can emit a shorter sequence that ends with SRLI.
7762     if (imm > 0) {
7763       size_t shamt = CLZ(static_cast<uint64_t>(imm));
7764       DCHECK_LE(shamt, 32u);  // Otherwise we would not get here as `insns_needed` would be <= 2.
7765       if (imm == dchecked_integral_cast<int64_t>(MaxInt<uint64_t>(64 - shamt))) {
7766         Addi(rd, Zero, -1);
7767         Srli(rd, rd, shamt);
7768         return;
7769       }
7770 
7771       int64_t value = static_cast<int64_t>(static_cast<uint64_t>(imm) << shamt);
7772       DCHECK_LT(value, 0);
7773       if (is_simple_li_value(value)){
7774         size_t new_insns_needed = count_simple_li_instructions(value) + /*SRLI*/ 1u;
7775         // In case of equal number of instructions, clang prefers the sequence without SRLI.
7776         if (new_insns_needed < insns_needed) {
7777           // If we emit ADDI, we set low bits that shall be shifted out to one in line with clang,
7778           // effectively choosing to emit the negative constant closest to zero.
7779           int32_t shifted_out = dchecked_integral_cast<int32_t>(MaxInt<uint32_t>(shamt));
7780           DCHECK_EQ(value & shifted_out, 0);
7781           emit_simple_li(rd, (value & 0xfff) == 0 ? value : value + shifted_out);
7782           Srli(rd, rd, shamt);
7783           return;
7784         }
7785       }
7786 
7787       size_t ctz = CTZ(static_cast<uint64_t>(value));
7788       if (IsInt(ctz + 20, value)) {
7789         size_t new_insns_needed = /*ADDI or LUI*/ 1u + /*SLLI*/ 1u + /*SRLI*/ 1u;
7790         if (new_insns_needed < insns_needed) {
7791           // Clang prefers ADDI+SLLI+SRLI over LUI+SLLI+SRLI.
7792           if (IsInt(ctz + 12, value)) {
7793             Addi(rd, Zero, value >> ctz);
7794             Slli(rd, rd, ctz);
7795           } else {
7796             Lui(rd, (static_cast<uint64_t>(value) >> ctz) & 0xfffffu);
7797             Slli(rd, rd, ctz - 12);
7798           }
7799           Srli(rd, rd, shamt);
7800           return;
7801         }
7802       }
7803     }
7804 
7805     // If we can use a scratch register, try using it to emit a shorter sequence. Without a
7806     // scratch reg, the sequence is up to 8 instructions, with a scratch reg only up to 6.
7807     if (can_use_tmp) {
7808       int64_t low = (imm & 0xffffffff) - ((imm & 0x80000000) << 1);
7809       int64_t remainder = imm - low;
7810       size_t slli_shamt = CTZ(remainder);
7811       DCHECK_GE(slli_shamt, 32u);
7812       int64_t high = remainder >> slli_shamt;
7813       size_t new_insns_needed =
7814           ((IsInt<20>(high) || (high & 0xfff) == 0u) ? 1u : 2u) +
7815           count_simple_li_instructions(low) +
7816           /*SLLI+ADD*/ 2u;
7817       if (new_insns_needed < insns_needed) {
7818         DCHECK_NE(low & 0xfffff000, 0);
7819         XRegister tmp = srs.AllocateXRegister();
7820         if (IsInt<20>(high) && !IsInt<12>(high)) {
7821           // Emit the signed 20-bit value with LUI and reduce the SLLI shamt by 12 to compensate.
7822           Lui(rd, static_cast<uint32_t>(high & 0xfffff));
7823           slli_shamt -= 12;
7824         } else {
7825           emit_simple_li(rd, high);
7826         }
7827         emit_simple_li(tmp, low);
7828         Slli(rd, rd, slli_shamt);
7829         Add(rd, rd, tmp);
7830         return;
7831       }
7832     }
7833   }
7834   emit_with_slli_addi(rd, trailing_slli_shamt != 0u ? imm >> trailing_slli_shamt : imm);
7835   if (trailing_slli_shamt != 0u) {
7836     Slli(rd, rd, trailing_slli_shamt);
7837   }
7838 }
7839 
7840 /////////////////////////////// RV64 VARIANTS extension end ////////////
7841 
7842 }  // namespace riscv64
7843 }  // namespace art
7844