/external/libvpx/libvpx/vp8/common/mips/msa/ |
D | bilinear_filter_msa.c |
      77  SRARI_H4_UH(vec4, vec5, vec6, vec7, VP8_FILTER_SHIFT);   in common_hz_2t_4x8_msa()
     117  SRARI_H4_UH(vec0, vec1, vec2, vec3, VP8_FILTER_SHIFT);   in common_hz_2t_8x4_msa()
     142  SRARI_H4_UH(vec0, vec1, vec2, vec3, VP8_FILTER_SHIFT);   in common_hz_2t_8x8mult_msa()
     155  SRARI_H4_UH(vec0, vec1, vec2, vec3, VP8_FILTER_SHIFT);   in common_hz_2t_8x8mult_msa()
     169  SRARI_H4_UH(vec0, vec1, vec2, vec3, VP8_FILTER_SHIFT);   in common_hz_2t_8x8mult_msa()
     180  SRARI_H4_UH(vec0, vec1, vec2, vec3, VP8_FILTER_SHIFT);   in common_hz_2t_8x8mult_msa()
     228  SRARI_H4_UH(out0, out1, out2, out3, VP8_FILTER_SHIFT);   in common_hz_2t_16w_msa()
     229  SRARI_H4_UH(out4, out5, out6, out7, VP8_FILTER_SHIFT);   in common_hz_2t_16w_msa()
     253  SRARI_H4_UH(out0, out1, out2, out3, VP8_FILTER_SHIFT);   in common_hz_2t_16w_msa()
     254  SRARI_H4_UH(out4, out5, out6, out7, VP8_FILTER_SHIFT);   in common_hz_2t_16w_msa()
     [all …]
|
D | vp8_macros_msa.h |
    1432  #define SRARI_H4_UH(...) SRARI_H4(v8u16, __VA_ARGS__)   macro
|
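Every call site in this vp8 section rounds with VP8_FILTER_SHIFT. Per 16-bit lane, the underlying srari.h operation is a rounded arithmetic right shift, i.e. (x + (1 << (shift - 1))) >> shift; assuming VP8_FILTER_SHIFT is 7, as in current libvpx trees (the value itself is not shown in this listing), that works out to (x + 64) >> 7. A minimal scalar sketch of that per-lane behavior, with srari_lane as a hypothetical helper name used only for illustration:

    #include <stdint.h>

    /* Illustrative helper, not part of libvpx: scalar equivalent of one
     * srari.h lane (shift must be > 0). With shift == 7 this is (x + 64) >> 7. */
    static inline int16_t srari_lane(int16_t x, int shift) {
      return (int16_t)((x + (1 << (shift - 1))) >> shift);
    }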
/external/libvpx/libvpx/vpx_dsp/mips/ |
D | vpx_convolve8_horiz_msa.c |
     357  SRARI_H4_UH(vec4, vec5, vec6, vec7, FILTER_BITS);   in common_hz_2t_4x8_msa()
     393  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_8x4_msa()
     418  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_8x8mult_msa()
     431  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_8x8mult_msa()
     444  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_8x8mult_msa()
     455  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_8x8mult_msa()
     499  SRARI_H4_UH(out0, out1, out2, out3, FILTER_BITS);   in common_hz_2t_16w_msa()
     500  SRARI_H4_UH(out4, out5, out6, out7, FILTER_BITS);   in common_hz_2t_16w_msa()
     523  SRARI_H4_UH(out0, out1, out2, out3, FILTER_BITS);   in common_hz_2t_16w_msa()
     524  SRARI_H4_UH(out4, out5, out6, out7, FILTER_BITS);   in common_hz_2t_16w_msa()
     [all …]
|
D | vpx_convolve8_avg_horiz_msa.c |
     368  SRARI_H4_UH(vec4, vec5, vec6, vec7, FILTER_BITS);   in common_hz_2t_and_aver_dst_4x8_msa()
     413  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_and_aver_dst_8x4_msa()
     441  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_and_aver_dst_8x8mult_msa()
     453  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_and_aver_dst_8x8mult_msa()
     467  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_and_aver_dst_8x8mult_msa()
     478  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in common_hz_2t_and_aver_dst_8x8mult_msa()
     529  SRARI_H4_UH(res0, res1, res2, res3, FILTER_BITS);   in common_hz_2t_and_aver_dst_16w_msa()
     530  SRARI_H4_UH(res4, res5, res6, res7, FILTER_BITS);   in common_hz_2t_and_aver_dst_16w_msa()
     554  SRARI_H4_UH(res0, res1, res2, res3, FILTER_BITS);   in common_hz_2t_and_aver_dst_16w_msa()
     555  SRARI_H4_UH(res4, res5, res6, res7, FILTER_BITS);   in common_hz_2t_and_aver_dst_16w_msa()
     [all …]
|
D | sub_pixel_variance_msa.c |
     425  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in sub_pixel_sse_diff_4width_h_msa()
     469  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in sub_pixel_sse_diff_8width_h_msa()
     519  SRARI_H4_UH(out0, out1, out2, out3, FILTER_BITS);   in sub_pixel_sse_diff_16width_h_msa()
     520  SRARI_H4_UH(out4, out5, out6, out7, FILTER_BITS);   in sub_pixel_sse_diff_16width_h_msa()
     661  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in sub_pixel_sse_diff_8width_v_msa()
    1054  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in sub_pixel_avg_sse_diff_4width_h_msa()
    1101  SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS);   in sub_pixel_avg_sse_diff_8width_h_msa()
    1164  SRARI_H4_UH(out0, out1, out2, out3, FILTER_BITS);   in subpel_avg_ssediff_16w_h_msa()
    1165  SRARI_H4_UH(out4, out5, out6, out7, FILTER_BITS);   in subpel_avg_ssediff_16w_h_msa()
    1333  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in sub_pixel_avg_sse_diff_8width_v_msa()
|
D | vpx_convolve8_vert_msa.c |
     351  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_vt_2t_4x8_msa()
     384  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_vt_2t_8x4_msa()
     416  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_vt_2t_8x8mult_msa()
     423  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_vt_2t_8x8mult_msa()
|
D | vpx_convolve8_avg_vert_msa.c |
     325  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_vt_2t_and_aver_dst_4x8_msa()
     366  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_vt_2t_and_aver_dst_8x4_msa()
     402  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_vt_2t_and_aver_dst_8x8mult_msa()
     409  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_vt_2t_and_aver_dst_8x8mult_msa()
|
D | vpx_convolve8_msa.c |
     300  SRARI_H4_UH(vec4, vec5, vec6, vec7, FILTER_BITS);   in common_hv_2ht_2vt_4x8_msa()
     358  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_hv_2ht_2vt_8x4_msa()
     435  SRARI_H4_UH(tmp5, tmp6, tmp7, tmp8, FILTER_BITS);   in common_hv_2ht_2vt_8x8mult_msa()
|
D | vpx_convolve8_avg_msa.c |
     325  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_hv_2ht_2vt_and_aver_dst_4x8_msa()
     392  SRARI_H4_UH(tmp0, tmp1, tmp2, tmp3, FILTER_BITS);   in common_hv_2ht_2vt_and_aver_dst_8x4_msa()
|
D | macros_msa.h |
    1499  #define SRARI_H4_UH(...) SRARI_H4(v8u16, __VA_ARGS__)   macro
|
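Both vp8_macros_msa.h and macros_msa.h define SRARI_H4_UH as a thin wrapper: it forwards its four halfword vectors and the shift amount to the generic SRARI_H4 helper with the unsigned element type v8u16. The sketch below reconstructs roughly what that amounts to, assuming the usual libvpx layering in which SRARI_H4 applies the MSA intrinsic __msa_srari_h to each vector in place; the helper body and the *_SKETCH names are illustrative assumptions, since this listing only shows the one-line wrappers.

    /* Minimal sketch of the assumed expansion (illustrative, not copied from
     * the files indexed above). Requires a MIPS toolchain with MSA support. */
    #include <msa.h>

    /* Round each 16-bit lane of four vectors and shift it right by `shift`
     * (a compile-time constant such as FILTER_BITS or VP8_FILTER_SHIFT),
     * writing the results back in place. */
    #define SRARI_H4_SKETCH(RTYPE, in0, in1, in2, in3, shift) \
      {                                                       \
        in0 = (RTYPE)__msa_srari_h((v8i16)in0, shift);        \
        in1 = (RTYPE)__msa_srari_h((v8i16)in1, shift);        \
        in2 = (RTYPE)__msa_srari_h((v8i16)in2, shift);        \
        in3 = (RTYPE)__msa_srari_h((v8i16)in3, shift);        \
      }

    /* Unsigned-halfword flavour, mirroring the SRARI_H4_UH wrappers above. */
    #define SRARI_H4_UH_SKETCH(...) SRARI_H4_SKETCH(v8u16, __VA_ARGS__)

At a typical call site from the references above, for example SRARI_H4_UH(vec0, vec1, vec2, vec3, FILTER_BITS), this rounds the four 16-bit filter accumulators and shifts them right by FILTER_BITS, the rounding step that follows the 2-tap (bilinear) filtering done in these convolve and sub-pixel variance routines.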