Searched refs: v2tmp1 (Results 1 – 4 of 4), sorted by relevance

/external/libvpx/libvpx/vp8/encoder/arm/neon/
shortfdct_neon.c
21 int16x4x2_t v2tmp0, v2tmp1; in vp8_short_fdct4x4_neon() local
45 v2tmp1 = vtrn_s16(vreinterpret_s16_s32(v2tmp2.val[1]), // d2 in vp8_short_fdct4x4_neon()
48 d4s16 = vadd_s16(v2tmp0.val[0], v2tmp1.val[1]); in vp8_short_fdct4x4_neon()
49 d5s16 = vadd_s16(v2tmp0.val[1], v2tmp1.val[0]); in vp8_short_fdct4x4_neon()
50 d6s16 = vsub_s16(v2tmp0.val[1], v2tmp1.val[0]); in vp8_short_fdct4x4_neon()
51 d7s16 = vsub_s16(v2tmp0.val[0], v2tmp1.val[1]); in vp8_short_fdct4x4_neon()
74 v2tmp1 = vtrn_s16(vreinterpret_s16_s32(v2tmp2.val[1]), // d2 in vp8_short_fdct4x4_neon()
77 d4s16 = vadd_s16(v2tmp0.val[0], v2tmp1.val[1]); in vp8_short_fdct4x4_neon()
78 d5s16 = vadd_s16(v2tmp0.val[1], v2tmp1.val[0]); in vp8_short_fdct4x4_neon()
79 d6s16 = vsub_s16(v2tmp0.val[1], v2tmp1.val[0]); in vp8_short_fdct4x4_neon()
[all …]
vp8_shortwalsh4x4_neon.c
29 int16x4x2_t v2tmp0, v2tmp1; in vp8_short_walsh4x4_neon() local
48 v2tmp1 = vtrn_s16(vreinterpret_s16_s32(v2tmp2.val[1]), // d2 in vp8_short_walsh4x4_neon()
51 d4s16 = vadd_s16(v2tmp0.val[0], v2tmp1.val[0]); in vp8_short_walsh4x4_neon()
52 d5s16 = vadd_s16(v2tmp0.val[1], v2tmp1.val[1]); in vp8_short_walsh4x4_neon()
53 d6s16 = vsub_s16(v2tmp0.val[1], v2tmp1.val[1]); in vp8_short_walsh4x4_neon()
54 d7s16 = vsub_s16(v2tmp0.val[0], v2tmp1.val[0]); in vp8_short_walsh4x4_neon()
76 v2tmp1 = vtrn_s16(vreinterpret_s16_s32(v2tmp3.val[0]), // d0 in vp8_short_walsh4x4_neon()
79 q8s32 = vaddl_s16(v2tmp1.val[0], v2tmp0.val[0]); in vp8_short_walsh4x4_neon()
80 q9s32 = vaddl_s16(v2tmp1.val[1], v2tmp0.val[1]); in vp8_short_walsh4x4_neon()
81 q10s32 = vsubl_s16(v2tmp1.val[1], v2tmp0.val[1]); in vp8_short_walsh4x4_neon()
[all …]
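
All of the encoder hits above come from the same idiom: v2tmp0 and v2tmp1 hold the second stage of a two-stage vtrn-based 4x4 transpose, after which their val[0]/val[1] halves are the columns of the original block and feed the butterfly adds and subtracts. The sketch below is a standalone illustration of that pattern, not code copied from libvpx; the function name, the out[] parameter, and the row names d0..d3 are assumptions made for the example.

  #include <arm_neon.h>

  /* Transpose a 4x4 block of int16 held in four D registers and apply the
   * butterfly stage, mirroring the pattern in the encoder hits above.
   * (Illustrative sketch; the names are not libvpx's.) */
  static void transpose_butterfly_4x4(int16x4_t d0, int16x4_t d1,
                                      int16x4_t d2, int16x4_t d3,
                                      int16x4_t out[4]) {
    /* Stage 1: exchange 32-bit pairs between rows 0/2 and rows 1/3. */
    int32x2x2_t v2tmp2 = vtrn_s32(vreinterpret_s32_s16(d0),
                                  vreinterpret_s32_s16(d2));
    int32x2x2_t v2tmp3 = vtrn_s32(vreinterpret_s32_s16(d1),
                                  vreinterpret_s32_s16(d3));

    /* Stage 2: exchange 16-bit lanes; v2tmp0/v2tmp1 now hold the transposed
     * columns: v2tmp0.val[0] = col0, v2tmp0.val[1] = col1,
     *          v2tmp1.val[0] = col2, v2tmp1.val[1] = col3. */
    int16x4x2_t v2tmp0 = vtrn_s16(vreinterpret_s16_s32(v2tmp2.val[0]),
                                  vreinterpret_s16_s32(v2tmp3.val[0]));
    int16x4x2_t v2tmp1 = vtrn_s16(vreinterpret_s16_s32(v2tmp2.val[1]),
                                  vreinterpret_s16_s32(v2tmp3.val[1]));

    /* Butterfly as in the FDCT hits: pair the outer and the inner columns. */
    out[0] = vadd_s16(v2tmp0.val[0], v2tmp1.val[1]);  /* col0 + col3 */
    out[1] = vadd_s16(v2tmp0.val[1], v2tmp1.val[0]);  /* col1 + col2 */
    out[2] = vsub_s16(v2tmp0.val[1], v2tmp1.val[0]);  /* col1 - col2 */
    out[3] = vsub_s16(v2tmp0.val[0], v2tmp1.val[1]);  /* col0 - col3 */
  }

The Walsh hits pair the columns differently (val[0] with val[0] and val[1] with val[1], i.e. columns 0/2 and 1/3), and vp8_short_walsh4x4_neon() later widens with vaddl_s16/vsubl_s16, but the transpose that produces v2tmp0/v2tmp1 is the same.
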
/external/libvpx/libvpx/vp8/common/arm/neon/
shortidct4x4llm_neon.c
31 int32x2x2_t v2tmp0, v2tmp1; in vp8_short_idct4x4llm_neon() local
62 v2tmp1 = vtrn_s32(vreinterpret_s32_s16(d3), vreinterpret_s32_s16(d5)); in vp8_short_idct4x4llm_neon()
64 vreinterpret_s16_s32(v2tmp1.val[0])); in vp8_short_idct4x4llm_neon()
66 vreinterpret_s16_s32(v2tmp1.val[1])); in vp8_short_idct4x4llm_neon()
96 v2tmp1 = vtrn_s32(vreinterpret_s32_s16(d3), vreinterpret_s32_s16(d5)); in vp8_short_idct4x4llm_neon()
98 vreinterpret_s16_s32(v2tmp1.val[0])); in vp8_short_idct4x4llm_neon()
100 vreinterpret_s16_s32(v2tmp1.val[1])); in vp8_short_idct4x4llm_neon()
iwalsh_neon.c
18 int16x4x2_t v2tmp0, v2tmp1; in vp8_short_inv_walsh4x4_neon() local
43 v2tmp1 = vtrn_s16(vreinterpret_s16_s32(v2tmp2.val[1]), in vp8_short_inv_walsh4x4_neon()
47 d4s16 = vadd_s16(v2tmp0.val[0], v2tmp1.val[1]); in vp8_short_inv_walsh4x4_neon()
48 d6s16 = vadd_s16(v2tmp0.val[1], v2tmp1.val[0]); in vp8_short_inv_walsh4x4_neon()
49 d5s16 = vsub_s16(v2tmp0.val[0], v2tmp1.val[1]); in vp8_short_inv_walsh4x4_neon()
50 d7s16 = vsub_s16(v2tmp0.val[1], v2tmp1.val[0]); in vp8_short_inv_walsh4x4_neon()
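
On the decoder side, iwalsh_neon.c follows the same int16x4x2_t pattern as the encoder hits, whereas shortidct4x4llm_neon.c declares v2tmp0/v2tmp1 as int32x2x2_t: there they carry the 32-bit stage of the transpose, and a separate vtrn_s16 pass (v2tmp2/v2tmp3) completes it. Below is a minimal sketch of that flavor, reconstructed from the visible hits rather than copied from the file; the function name and out[] parameter are illustrative, and d2..d5 stand for the four 16x4 rows at that point in the IDCT.

  #include <arm_neon.h>

  /* int32x2x2_t flavor as in vp8_short_idct4x4llm_neon(): v2tmp0/v2tmp1 are
   * the 32-bit stage, v2tmp2/v2tmp3 the 16-bit stage of the transpose.
   * (Illustrative sketch; the names are not libvpx's.) */
  static void idct_style_transpose_4x4(int16x4_t d2, int16x4_t d3,
                                       int16x4_t d4, int16x4_t d5,
                                       int16x4_t out[4]) {
    int32x2x2_t v2tmp0 = vtrn_s32(vreinterpret_s32_s16(d2),
                                  vreinterpret_s32_s16(d4));
    int32x2x2_t v2tmp1 = vtrn_s32(vreinterpret_s32_s16(d3),
                                  vreinterpret_s32_s16(d5));
    int16x4x2_t v2tmp2 = vtrn_s16(vreinterpret_s16_s32(v2tmp0.val[0]),
                                  vreinterpret_s16_s32(v2tmp1.val[0]));
    int16x4x2_t v2tmp3 = vtrn_s16(vreinterpret_s16_s32(v2tmp0.val[1]),
                                  vreinterpret_s16_s32(v2tmp1.val[1]));
    out[0] = v2tmp2.val[0];  /* column 0 */
    out[1] = v2tmp2.val[1];  /* column 1 */
    out[2] = v2tmp3.val[0];  /* column 2 */
    out[3] = v2tmp3.val[1];  /* column 3 */
  }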