/external/libvpx/libvpx/vpx_dsp/mips/
D | variance_msa.c |
    144  LD_UB2(src_ptr, 16, src0, src1);    in sse_diff_32width_msa()
    146  LD_UB2(ref_ptr, 16, ref0, ref1);    in sse_diff_32width_msa()
    151  LD_UB2(src_ptr, 16, src0, src1);    in sse_diff_32width_msa()
    153  LD_UB2(ref_ptr, 16, ref0, ref1);    in sse_diff_32width_msa()
    158  LD_UB2(src_ptr, 16, src0, src1);    in sse_diff_32width_msa()
    160  LD_UB2(ref_ptr, 16, ref0, ref1);    in sse_diff_32width_msa()
    165  LD_UB2(src_ptr, 16, src0, src1);    in sse_diff_32width_msa()
    167  LD_UB2(ref_ptr, 16, ref0, ref1);    in sse_diff_32width_msa()
    189  LD_UB2(src_ptr, 16, src0, src1);    in sse_diff_32x64_msa()
    191  LD_UB2(ref_ptr, 16, ref0, ref1);    in sse_diff_32x64_msa()
    [all …]

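Every occurrence in this listing has the shape LD_UB2(base, stride, out0, out1): it loads one 16-byte unsigned-byte vector from base and a second from base + stride. Two idioms recur across these files: a stride of the literal 16 fetches the two halves of a contiguous 32-byte row (the 32-pixel-wide variance paths above), while a stride of the row pitch (src_stride/ref_stride, as in sad_16width_msa() in the next file) fetches two vertically adjacent rows. Below is a minimal plain-C model of that semantics for readers without a MIPS/MSA toolchain; vec16u8 and LD_UB2_MODEL are illustrative stand-ins, not libvpx names, and the real macro produces v16u8 MSA registers rather than structs:

    #include <stdint.h>
    #include <string.h>

    /* Stand-in for the MSA v16u8 vector type: 16 unsigned bytes. */
    typedef struct { uint8_t b[16]; } vec16u8;

    /* Models LD_UB2(psrc, stride, out0, out1): load the 16 bytes at
     * psrc into out0, then the 16 bytes at psrc + stride into out1.
     * With stride == 16 the pair covers one 32-byte row; with
     * stride == src_stride it covers two consecutive rows. */
    #define LD_UB2_MODEL(psrc, stride, out0, out1)                 \
      do {                                                         \
        memcpy((out0).b, (const uint8_t *)(psrc), 16);             \
        memcpy((out1).b, (const uint8_t *)(psrc) + (stride), 16);  \
      } while (0)
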
D | sad_msa.c |
    77   LD_UB2(src, src_stride, src0, src1);    in sad_16width_msa()
    79   LD_UB2(ref, ref_stride, ref0, ref1);    in sad_16width_msa()
    83   LD_UB2(src, src_stride, src0, src1);    in sad_16width_msa()
    85   LD_UB2(ref, ref_stride, ref0, ref1);    in sad_16width_msa()
    101  LD_UB2(src, 16, src0, src1);            in sad_32width_msa()
    103  LD_UB2(ref, 16, ref0, ref1);            in sad_32width_msa()
    107  LD_UB2(src, 16, src0, src1);            in sad_32width_msa()
    109  LD_UB2(ref, 16, ref0, ref1);            in sad_32width_msa()
    113  LD_UB2(src, 16, src0, src1);            in sad_32width_msa()
    115  LD_UB2(ref, 16, ref0, ref1);            in sad_32width_msa()
    [all …]

D | sub_pixel_variance_msa.c |
    95   LD_UB2(sec_pred, 16, pred0, pred1);    in avg_sse_diff_8width_msa()
    184  LD_UB2(sec_pred, 16, pred0, pred1);    in avg_sse_diff_32width_msa()
    186  LD_UB2(src_ptr, 16, src0, src1);       in avg_sse_diff_32width_msa()
    188  LD_UB2(ref_ptr, 16, ref0, ref1);       in avg_sse_diff_32width_msa()
    194  LD_UB2(sec_pred, 16, pred0, pred1);    in avg_sse_diff_32width_msa()
    196  LD_UB2(src_ptr, 16, src0, src1);       in avg_sse_diff_32width_msa()
    198  LD_UB2(ref_ptr, 16, ref0, ref1);       in avg_sse_diff_32width_msa()
    204  LD_UB2(sec_pred, 16, pred0, pred1);    in avg_sse_diff_32width_msa()
    206  LD_UB2(src_ptr, 16, src0, src1);       in avg_sse_diff_32width_msa()
    208  LD_UB2(ref_ptr, 16, ref0, ref1);       in avg_sse_diff_32width_msa()
    [all …]

D | vpx_convolve8_avg_vert_msa.c |
    499  LD_UB2(src, 16, src0, src5);                 in common_vt_2t_and_aver_dst_32w_msa()
    578  LD_UB2(src, src_stride, src1, src2);         in common_vt_2t_and_aver_dst_64w_msa()
    579  LD_UB2(dst, dst_stride, dst0, dst1);         in common_vt_2t_and_aver_dst_64w_msa()
    580  LD_UB2(src + 16, src_stride, src4, src5);    in common_vt_2t_and_aver_dst_64w_msa()
    581  LD_UB2(dst + 16, dst_stride, dst2, dst3);    in common_vt_2t_and_aver_dst_64w_msa()
    582  LD_UB2(src + 32, src_stride, src7, src8);    in common_vt_2t_and_aver_dst_64w_msa()
    583  LD_UB2(dst + 32, dst_stride, dst4, dst5);    in common_vt_2t_and_aver_dst_64w_msa()
    584  LD_UB2(src + 48, src_stride, src10, src11);  in common_vt_2t_and_aver_dst_64w_msa()
    585  LD_UB2(dst + 48, dst_stride, dst6, dst7);    in common_vt_2t_and_aver_dst_64w_msa()

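The 64-pixel-wide convolve paths extend the row-pitch idiom: each row pair is covered by four LD_UB2 calls at column offsets 0, 16, 32 and 48, which is exactly what lines 578-585 above spell out in unrolled form. A sketch of that access pattern, reusing the LD_UB2_MODEL stand-in from the earlier sketch (load_64w_row_pair is an illustrative name, not a libvpx function):

    /* Illustration: gather a 64-byte-wide, two-row block as four
     * 16-byte columns; the real code unrolls this loop into the
     * src / src + 16 / src + 32 / src + 48 calls listed above. */
    static void load_64w_row_pair(const uint8_t *src, int src_stride,
                                  vec16u8 rows[4][2]) {
      int col;
      for (col = 0; col < 4; ++col) {
        LD_UB2_MODEL(src + 16 * col, src_stride, rows[col][0], rows[col][1]);
      }
    }
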
D | vpx_convolve_avg_msa.c |
    39  LD_UB2(src, src_stride, src0, src1);    in avg_width4_msa()
    42  LD_UB2(dst, dst_stride, dst0, dst1);    in avg_width4_msa()

D | vpx_convolve8_avg_horiz_msa.c |
    191  LD_UB2(dst, dst_stride, dst0, dst1);     in common_hz_8t_and_aver_dst_16w_msa()
    251  LD_UB2(dst, 16, dst1, dst2);             in common_hz_8t_and_aver_dst_32w_msa()
    310  LD_UB2(&dst[cnt << 5], 16, dst1, dst2);  in common_hz_8t_and_aver_dst_64w_msa()
    608  LD_UB2(dst, 16, dst0, dst1);             in common_hz_2t_and_aver_dst_32w_msa()
    612  LD_UB2(dst, 16, dst2, dst3);             in common_hz_2t_and_aver_dst_32w_msa()

D | vpx_convolve8_vert_msa.c |
    575  LD_UB2(src, src_stride, src1, src2);         in common_vt_2t_64w_msa()
    576  LD_UB2(src + 16, src_stride, src4, src5);    in common_vt_2t_64w_msa()
    577  LD_UB2(src + 32, src_stride, src7, src8);    in common_vt_2t_64w_msa()
    578  LD_UB2(src + 48, src_stride, src10, src11);  in common_vt_2t_64w_msa()

D | intrapred_msa.c |
    324  LD_UB2(src_top, 16, top0, top1);     in intra_predict_dc_32x32_msa()
    325  LD_UB2(src_left, 16, left0, left1);  in intra_predict_dc_32x32_msa()
    353  LD_UB2(src, 16, data0, data1);       in intra_predict_dc_tl_32x32_msa()

D | vpx_convolve_copy_msa.c |
    82  LD_UB2(src, src_stride, src0, src1);  in copy_width8_msa()

D | idct32x32_msa.c |
    720  LD_UB2(dst, 16, dst0, dst1);               in vpx_idct32x32_1_add_msa()
    721  LD_UB2(dst + dst_stride, 16, dst2, dst3);  in vpx_idct32x32_1_add_msa()

D | loopfilter_16_msa.c |
    101   LD_UB2(filter48 + 4 * 16, 16, q1, q2);  in vpx_hz_lpf_t16_16w()
    836   LD_UB2(filter48 + 4 * 16, 16, q1, q2);  in vpx_vt_lpf_t16_8w()
    1160  LD_UB2(filter48 + 4 * 16, 16, q1, q2);  in vpx_vt_lpf_t16_16w()

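A third idiom shows up in the loop-filter: filter48 is a flat scratch buffer laid out as consecutive 16-byte vectors, so filter48 + 4 * 16 with a stride of 16 reloads vector elements 4 and 5 of that buffer into q1 and q2, rather than touching the frame itself.
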
D | macros_msa.h |
    300  #define LD_UB2(...) LD_B2(v16u8, __VA_ARGS__)  macro

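Line 300 shows that LD_UB2 is just the unsigned-byte (v16u8) instantiation of the generic two-vector loader LD_B2. From memory of macros_msa.h, the chain underneath looks approximately like the following; treat it as a sketch of the structure, not a verbatim quote of the header:

    /* Sketch of the loader chain in macros_msa.h (approximate). */
    #define LD_B(RTYPE, psrc) *((const RTYPE *)(psrc))

    #define LD_B2(RTYPE, psrc, stride, out0, out1) \
      {                                            \
        out0 = LD_B(RTYPE, (psrc));                \
        out1 = LD_B(RTYPE, (psrc) + stride);       \
      }

    #define LD_UB2(...) LD_B2(v16u8, __VA_ARGS__)  /* line 300 above */

Under that reading, every call site in this section expands to exactly two v16u8 loads, one at psrc and one at psrc + stride.
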
/external/libvpx/libvpx/vp8/common/mips/msa/
D | postproc_msa.c |
    209  LD_UB2(p_src - 2 * src_stride, src_stride, above2, above1);  in postproc_down_across_chroma_msa()
    211  LD_UB2(p_src + 1 * src_stride, src_stride, below1, below2);  in postproc_down_across_chroma_msa()
    238  LD_UB2(p_src - 2 * src_stride, src_stride, above2, above1);  in postproc_down_across_chroma_msa()
    240  LD_UB2(p_src + 1 * src_stride, src_stride, below1, below2);  in postproc_down_across_chroma_msa()
    343  LD_UB2(p_dst, dst_stride, inter0, inter1);                   in postproc_down_across_chroma_msa()
    346  LD_UB2(p_dst + 2 * dst_stride, dst_stride, inter2, inter3);  in postproc_down_across_chroma_msa()
    349  LD_UB2(p_dst + 4 * dst_stride, dst_stride, inter4, inter5);  in postproc_down_across_chroma_msa()
    352  LD_UB2(p_dst + 6 * dst_stride, dst_stride, inter6, inter7);  in postproc_down_across_chroma_msa()
    378  LD_UB2(p_src - 2 * src_stride, src_stride, above2, above1);  in postproc_down_across_luma_msa()
    380  LD_UB2(p_src + 1 * src_stride, src_stride, below1, below2);  in postproc_down_across_luma_msa()
    [all …]

D | vp8_macros_msa.h |
    291  #define LD_UB2(...) LD_B2(v16u8, __VA_ARGS__)  macro

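Note that vp8_macros_msa.h (line 291) carries the same definition as the vpx_dsp header at line 300 above; the VP8 tree keeps its own copy of the MSA macro set, presumably so its files build without including vpx_dsp/mips/macros_msa.h, which means the two definitions have to be kept in sync by hand.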