/art/runtime/interpreter/mterp/mips/

D | op_packed_switch.S
    13  FETCH(a0, 1)            # a0 <- bbbb (lo)
    17  or a0, a0, t0           # a0 <- BBBBbbbb
    19  EAS1(a0, rPC, a0)       # a0 <- PC + BBBBbbbb*2
    20  JAL($func)              # a0 <- code-unit branch offset
    23  move a0, rSELF
    34  FETCH(a0, 1)            # a0 <- bbbb (lo)
    38  or a0, a0, t0           # a0 <- BBBBbbbb
    40  EAS1(a0, rPC, a0)       # a0 <- PC + BBBBbbbb*2
    41  JAL($func)              # a0 <- code-unit branch offset

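The packed-switch excerpt assembles a 32-bit signed offset BBBBbbbb from two 16-bit code units and scales it by two (the EAS1 step) to locate the switch payload relative to the current PC before calling $func. A minimal C sketch of that address computation; the helper name is my own, not ART code:

    #include <stdint.h>

    /* The offset is counted in 16-bit Dalvik code units, so pointer arithmetic
     * on a uint16_t* gives the same "offset * 2 bytes" as EAS1 in the listing. */
    static const uint16_t *switch_payload_addr(const uint16_t *pc,
                                               uint16_t bbbb_lo, uint16_t BBBB_hi) {
        int32_t offset = (int32_t)(((uint32_t)BBBB_hi << 16) | bbbb_lo);
        return pc + offset;
    }
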
D | op_shl_long.S
     8  FETCH(a0, 1)                     # a0 <- CCBB
    10  and a3, a0, 255                  # a3 <- BB
    11  srl a0, a0, 8                    # a0 <- CC
    13  GET_VREG(a2, a0)                 # a2 <- vCC
    14  LOAD64(a0, a1, a3)               # a0/a1 <- vBB/vBB+1
    20  sll v0, a0, a2                   # rlo<- alo << (shift&31)
    23  srl a0, 1
    24  srl a0, v1                       # alo<- alo >> (32-(shift&31))
    26  or v1, a0                        # rhi<- rhi | alo
    27  SET_VREG64_GOTO(v0, v1, t2, t0)  # vAA/vAA+1 <- v0/v1

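The shl-long excerpt builds a 64-bit left shift out of 32-bit operations: the result low word is alo << (n & 31), and the high word combines ahi << (n & 31) with the bits carried up from the low word; the srl-by-1 followed by srl-by-(31-n) pair computes alo >> (32 - n) without ever shifting by a full 32 bits. A rough C equivalent written under those assumptions, not taken from the source:

    #include <stdint.h>

    static void shl_long(uint32_t lo, uint32_t hi, uint32_t n,
                         uint32_t *out_lo, uint32_t *out_hi) {
        n &= 63;
        if (n & 32) {                     /* handled by a branch outside the excerpt */
            *out_hi = lo << (n & 31);
            *out_lo = 0;
        } else if (n == 0) {              /* the two-step srl makes this case safe in asm */
            *out_lo = lo;
            *out_hi = hi;
        } else {
            *out_lo = lo << n;
            *out_hi = (hi << n) | (lo >> (32 - n));
        }
    }
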
D | op_aget_wide.S
     7  FETCH(a0, 1)                               # a0 <- CCBB
     9  and a2, a0, 255                            # a2 <- BB
    10  srl a3, a0, 8                              # a3 <- CC
    11  GET_VREG(a0, a2)                           # a0 <- vBB (array object)
    14  beqz a0, common_errNullObject              # yes, bail
    15  LOAD_base_offMirrorArray_length(a3, a0)    # a3 <- arrayObj->length
    16  EAS3(a0, a0, a1)                           # a0 <- arrayObj + index*width
    20  LOAD64_off(a2, a3, a0, MIRROR_WIDE_ARRAY_DATA_OFFSET)

D | op_aput_wide.S
     7  FETCH(a0, 1)                               # a0 <- CCBB
     9  and a2, a0, 255                            # a2 <- BB
    10  srl a3, a0, 8                              # a3 <- CC
    11  GET_VREG(a0, a2)                           # a0 <- vBB (array object)
    14  beqz a0, common_errNullObject              # yes, bail
    15  LOAD_base_offMirrorArray_length(a3, a0)    # a3 <- arrayObj->length
    16  EAS3(a0, a0, a1)                           # a0 <- arrayObj + index*width
    24  STORE64_off(a2, a3, a0, MIRROR_WIDE_ARRAY_DATA_OFFSET)  # vBB[vCC] <- a2/a3

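Both wide array handlers above follow the same pattern: null-check the array object, bounds-check the index against the embedded length field, then address the element at the wide-data offset plus index*8 (EAS3 scales by eight). A C sketch of the aget-wide side; the struct layout and names here are purely illustrative, ART reads the length and data through the MIRROR_* offsets instead:

    #include <stddef.h>
    #include <stdint.h>

    struct wide_array {
        int32_t length;
        int64_t data[];                       /* 8-byte elements, index scaled by 8 */
    };

    static int aget_wide(const struct wide_array *arr, uint32_t index, int64_t *out) {
        if (arr == NULL) return -1;           /* beqz ... common_errNullObject */
        if (index >= (uint32_t)arr->length) return -2;   /* common_errArrayIndex */
        *out = arr->data[index];              /* LOAD64_off at the data offset */
        return 0;
    }
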
D | op_mul_long.S
    13  FETCH(a0, 1)            # a0 <- CCBB
    14  and t0, a0, 255         # t0 <- BB
    15  srl t1, a0, 8           # t1 <- CC
    17  LOAD64(a0, a1, t0)      # a0/a1 <- vBB/vBB+1
    22  mul v1, a3, a0          # v1= a3a0
    24  mulu v0, a2, a0         # v0= a2a0
    25  muhu t1, a2, a0
    27  multu a2, a0
    35  GET_OPA(a0)             # a0 <- AA
    42  SET_VREG64(v0, v1, a0)  # vAA::vAA+1 <- v0(low) :: v1(high)

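mul-long keeps only the low 64 bits of the product, so it can be composed from 32-bit multiplies: the low word is low32(alo*blo) and the high word is ahi*blo + alo*bhi + high32(alo*blo), which is what the mul/mulu/muhu (or multu on pre-R6 cores) sequence accumulates into v1/v0. A C sketch of that decomposition, mine rather than ART's:

    #include <stdint.h>

    static void mul_long(uint32_t a_lo, uint32_t a_hi,
                         uint32_t b_lo, uint32_t b_hi,
                         uint32_t *out_lo, uint32_t *out_hi) {
        uint64_t ll = (uint64_t)a_lo * b_lo;     /* mulu/muhu (or multu) pair */
        uint32_t hi = (uint32_t)(ll >> 32);
        hi += a_hi * b_lo;                       /* wraps mod 2^32, like mul */
        hi += a_lo * b_hi;
        *out_lo = (uint32_t)ll;
        *out_hi = hi;
    }
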
D | op_ushr_long.S
     8  FETCH(a0, 1)        # a0 <- CCBB
    10  and a3, a0, 255     # a3 <- BB
    11  srl a0, a0, 8       # a0 <- CC
    13  GET_VREG(a2, a0)    # a2 <- vCC
    14  LOAD64(a0, a1, a3)  # a0/a1 <- vBB/vBB+1
    22  srl v0, a0, a2      # rlo<- alo >> (shift&31)
    23  not a0, a2          # alo<- 31-n (shift is 5b)
    25  sll a1, a0          # ahi<- ahi << (32-(shift&31))

D | op_shr_long.S
     8  FETCH(a0, 1)        # a0 <- CCBB
    10  and a3, a0, 255     # a3 <- BB
    11  srl a0, a0, 8       # a0 <- CC
    13  GET_VREG(a2, a0)    # a2 <- vCC
    14  LOAD64(a0, a1, a3)  # a0/a1 <- vBB/vBB+1
    21  srl v0, a0, a2      # rlo<- alo >> (shift&31)
    22  not a0, a2          # alo<- 31-shift (shift is 5b)
    24  sll a1, a0          # ahi<- ahi << (32-(shift&31))

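The two right-shift entries above are the mirror image of the shl-long recipe: for shift amounts 1..31 the low result word is alo >> n combined with ahi << (32 - n), and the high word is ahi >> n, using srl for ushr-long and sra for shr-long; the not (31 - n) plus an extra single-bit shift stands in for 32 - n so that n == 0 stays well defined. A C sketch under those assumptions (not ART code):

    #include <stdint.h>

    static void ushr_long(uint32_t lo, uint32_t hi, uint32_t n,
                          uint32_t *out_lo, uint32_t *out_hi) {
        n &= 31;                              /* shifts >= 32 take a separate path */
        *out_lo = (lo >> n) | (n ? hi << (32 - n) : 0);
        *out_hi = hi >> n;                    /* shr-long uses (int32_t)hi >> n here */
    }
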
D | op_aget.S
    17  GET_VREG(a0, a2)                           # a0 <- vBB (array object)
    20  beqz a0, common_errNullObject              # yes, bail
    21  LOAD_base_offMirrorArray_length(a3, a0)    # a3 <- arrayObj->length
    23  EASN(a0, a0, a1, $shift)                   # a0 <- arrayObj + index*width
    25  addu a0, a0, a1
    30  $load a2, $data_offset(a0)                 # a2 <- vBB[vCC]

D | op_aput.S
    15  GET_VREG(a0, a2)                           # a0 <- vBB (array object)
    18  beqz a0, common_errNullObject              # yes, bail
    19  LOAD_base_offMirrorArray_length(a3, a0)    # a3 <- arrayObj->length
    21  EASN(a0, a0, a1, $shift)                   # a0 <- arrayObj + index*width
    23  addu a0, a0, a1
    29  $store a2, $data_offset(a0)                # vBB[vCC] <- a2

D | op_move_result.S
     6  lw a0, OFF_FP_RESULT_REGISTER(rFP)  # get pointer to result JType
     7  lw a0, 0(a0)                        # a0 <- result.i
    10  SET_VREG_OBJECT(a0, a2)             # fp[AA] <- a0
    12  SET_VREG(a0, a2)                    # fp[AA] <- a0

D | op_const_wide_32.S
     2  FETCH(a0, 1)            # a0 <- 0000bbbb (low)
     7  or a0, a0, a2           # a0 <- BBBBbbbb
     8  sra a1, a0, 31          # a1 <- ssssssss
    10  SET_VREG64(a0, a1, a3)  # vAA/vAA+1 <- a0/a1

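const-wide/32 sign-extends a 32-bit literal into a 64-bit register pair: after assembling BBBBbbbb, the sra by 31 replicates the sign bit across the whole high word. A small C sketch of the same idea (mine, not from the source):

    #include <stdint.h>

    static void const_wide_32(uint16_t bbbb_lo, uint16_t BBBB_hi,
                              uint32_t *out_lo, uint32_t *out_hi) {
        int32_t value = (int32_t)(((uint32_t)BBBB_hi << 16) | bbbb_lo);
        *out_lo = (uint32_t)value;
        *out_hi = (uint32_t)(value >> 31);    /* 0x00000000 or 0xffffffff */
    }
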
D | op_const_high16.S
     2  FETCH(a0, 1)               # a0 <- 0000BBBB (zero-extended)
     4  sll a0, a0, 16             # a0 <- BBBB0000
     7  SET_VREG_GOTO(a0, a3, t0)  # vAA <- a0

D | op_const.S
     3  FETCH(a0, 1)               # a0 <- bbbb (low)
     7  or a0, a1, a0              # a0 <- BBBBbbbb
     9  SET_VREG_GOTO(a0, a3, t0)  # vAA <- a0

D | binop.S
    17  FETCH(a0, 1)       # a0 <- CCBB
    19  srl a3, a0, 8      # a3 <- CC
    20  and a2, a0, 255    # a2 <- BB
    22  GET_VREG(a0, a2)   # a0 <- vBB
    30  $instr             # $result <- op, a0-a3 changed

D | op_iput_wide_quick.S
     2  GET_OPA4(a0)         # a0 <- A(+)
     7  EAS2(a3, rFP, a0)    # a3 <- &fp[A]
     8  LOAD64(a0, a1, a3)   # a0/a1 <- fp[A]
    11  addu a2, a2, a3      # a2 <- &obj.field (64 bits, aligned)
    12  STORE64(a0, a1, a2)  # obj.field (64 bits, aligned) <- a0/a1

/art/runtime/interpreter/mterp/mips64/

D | op_aget.S
    14  GET_VREG_U a0, a2                       # a0 <- vBB (array object)
    16  beqz a0, common_errNullObject           # bail if null array object
    17  lw a3, MIRROR_ARRAY_LENGTH_OFFSET(a0)   # a3 <- arrayObj->length
    20  dlsa a0, a1, a0, $shift                 # a0 <- arrayObj + index*width
    22  daddu a0, a1, a0                        # a0 <- arrayObj + index*width
    26  $load a2, $data_offset(a0)              # a2 <- vBB[vCC]

D | op_aput.S
    14  GET_VREG_U a0, a2                       # a0 <- vBB (array object)
    16  beqz a0, common_errNullObject           # bail if null array object
    17  lw a3, MIRROR_ARRAY_LENGTH_OFFSET(a0)   # a3 <- arrayObj->length
    20  dlsa a0, a1, a0, $shift                 # a0 <- arrayObj + index*width
    22  daddu a0, a1, a0                        # a0 <- arrayObj + index*width
    28  $store a2, $data_offset(a0)             # vBB[vCC] <- a2

D | op_packed_switch.S
    14  lh a0, 2(rPC)                          # a0 <- bbbb (lo)
    17  ins a0, a1, 16, 16                     # a0 <- BBBBbbbb
    19  dlsa a0, a0, rPC, 1                    # a0 <- PC + BBBBbbbb*2
    24  move a0, rSELF
    32  move a0, rINST                         # a0 <- offset
    34  blez a0, MterpCheckSuspendAndContinue  # suspend check if backwards branch

D | op_iput_wide_quick.S
     5  ext a0, rINST, 8, 4    # a0 <- A
     7  GET_VREG_WIDE a0, a0   # a0 <- fp[A]
    10  sw a0, 0(a1)
    11  dsrl32 a0, a0, 0
    12  sw a0, 4(a1)

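The tail of the mips64 iput-wide-quick handler stores the 64-bit register as two 32-bit words: sw writes the low half, dsrl32 moves the high half down, and a second sw writes it at offset +4, so the field only needs 4-byte alignment. A C sketch of that split store, assuming a little-endian layout (illustrative, not ART code):

    #include <stdint.h>

    static void store_wide_split(uint32_t *field_addr, uint64_t value) {
        field_addr[0] = (uint32_t)value;          /* sw low half at +0    */
        field_addr[1] = (uint32_t)(value >> 32);  /* dsrl32, then sw at +4 */
    }
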
D | op_cmp_long.S
     5  GET_VREG_WIDE a0, a2   # a0 <- vBB
     8  slt a2, a0, a1
     9  slt a0, a1, a0
    10  subu a0, a0, a2
    12  SET_VREG a0, a4        # vAA <- result

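cmp-long produces -1, 0, or 1 without branching: the two slt results are subtracted, the classic (x > y) - (x < y) idiom. A C rendering of that idiom (not the ART source):

    #include <stdint.h>

    /* Branch-free three-way compare matching the slt/slt/subu sequence above. */
    static int32_t cmp_long(int64_t vBB, int64_t vCC) {
        return (int32_t)(vBB > vCC) - (int32_t)(vBB < vCC);
    }
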
D | op_aget_wide.S
     9  GET_VREG_U a0, a2                       # a0 <- vBB (array object)
    11  beqz a0, common_errNullObject           # bail if null array object
    12  lw a3, MIRROR_ARRAY_LENGTH_OFFSET(a0)   # a3 <- arrayObj->length
    13  dlsa a0, a1, a0, 3                      # a0 <- arrayObj + index*width
    16  lw a2, MIRROR_WIDE_ARRAY_DATA_OFFSET(a0)
    17  lw a3, (MIRROR_WIDE_ARRAY_DATA_OFFSET+4)(a0)

D | op_aput_wide.S
     9  GET_VREG_U a0, a2                       # a0 <- vBB (array object)
    11  beqz a0, common_errNullObject           # bail if null array object
    12  lw a3, MIRROR_ARRAY_LENGTH_OFFSET(a0)   # a3 <- arrayObj->length
    13  dlsa a0, a1, a0, 3                      # a0 <- arrayObj + index*width
    18  sw a2, MIRROR_WIDE_ARRAY_DATA_OFFSET(a0)
    20  sw a2, (MIRROR_WIDE_ARRAY_DATA_OFFSET+4)(a0)  # vBB[vCC] <- a2

D | op_const_wide_high16.S
     3  lh a0, 2(rPC)         # a0 <- BBBB
     5  dsll32 a0, a0, 16     # a0 <- BBBB000000000000
     7  SET_VREG_WIDE a0, a2  # vAA <- +BBBB000000000000

D | op_const_high16.S
     3  lh a0, 2(rPC)    # a0 <- BBBB
     5  sll a0, a0, 16   # a0 <- BBBB0000
     7  SET_VREG a0, a2  # vAA <- +BBBB0000

D | op_const_wide.S
     3  lh a0, 2(rPC)          # a0 <- bbbb (low)
     8  ins a0, a1, 16, 16     # a0 = BBBBbbbb
    10  dinsu a0, a2, 32, 32   # a0 = HHHHhhhhBBBBbbbb
    12  SET_VREG_WIDE a0, a4   # vAA <- +HHHHhhhhBBBBbbbb

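const-wide gathers its 64-bit literal from four consecutive 16-bit code units: ins merges each pair of halves into a 32-bit word and dinsu places the upper word into bits 32..63. A C sketch of how HHHHhhhhBBBBbbbb is assembled (mine, not ART code):

    #include <stdint.h>

    static int64_t const_wide(uint16_t bbbb, uint16_t BBBB,
                              uint16_t hhhh, uint16_t HHHH) {
        uint32_t low  = ((uint32_t)BBBB << 16) | bbbb;    /* ins a0, a1, 16, 16 */
        uint32_t high = ((uint32_t)HHHH << 16) | hhhh;    /* ins a2, a3, 16, 16 */
        return (int64_t)(((uint64_t)high << 32) | low);   /* dinsu a0, a2, 32, 32 */
    }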