1    /*
2     * Array get, 64 bits.  vAA <- vBB[vCC].
3     *
4     * Arrays of long/double are 64-bit aligned.
     *
     * Register roles (MIPS32, mterp conventions):
     *   rOBJ  - AA, the destination vreg number; held here so it survives
     *           until the final SET_VREG64_GOTO
     *   a0    - vBB (array object pointer), later the element address
     *   a1    - vCC (requested index)
     *   a2/a3 - scratch (BB/CC decode), later the 64-bit result pair
     *   t0    - next opcode for the computed-goto dispatch
     *
     * Bails to common_errNullObject on a null array reference and to
     * common_errArrayIndex on an out-of-range index. The bounds check uses
     * an unsigned compare (bgeu), so a negative index also fails it.
5     */
6    /* aget-wide vAA, vBB, vCC */
7    FETCH(a0, 1)                           #  a0 <- CCBB (2nd code unit of the instruction)
8    GET_OPA(rOBJ)                          #  rOBJ <- AA
9    and       a2, a0, 255                  #  a2 <- BB (low byte)
10    srl       a3, a0, 8                    #  a3 <- CC (high byte)
11    GET_VREG(a0, a2)                       #  a0 <- vBB (array object)
12    GET_VREG(a1, a3)                       #  a1 <- vCC (requested index)
13    # null array object? must be checked before touching the length field
14    beqz      a0, common_errNullObject     #  yes, bail
15    LOAD_base_offMirrorArray_length(a3, a0) #  a3 <- arrayObj->length
16    EAS3(a0, a0, a1)                       #  a0 <- arrayObj + index*8; address computed
                                             #  early but not dereferenced until after the
                                             #  bounds check below, so this is safe
17    bgeu      a1, a3, common_errArrayIndex #  index >= length (unsigned), bail
18
19    FETCH_ADVANCE_INST(2)                  #  advance rPC, load rINST
20    LOAD64_off(a2, a3, a0, MIRROR_WIDE_ARRAY_DATA_OFFSET) #  a2/a3 <- 64-bit element
21    GET_INST_OPCODE(t0)                    #  extract opcode from rINST
22    SET_VREG64_GOTO(a2, a3, rOBJ, t0)      #  vAA/vAA+1 <- a2/a3; dispatch next opcode
23