/external/libvpx/libvpx/vp8/common/mips/msa/
postproc_msa.c
    208  ref = LD_UB(f);  in postproc_down_across_chroma_msa()
    210  src = LD_UB(p_src);  in postproc_down_across_chroma_msa()
    213  above2 = LD_UB(p_src + 3 * src_stride);  in postproc_down_across_chroma_msa()
    215  above1 = LD_UB(p_src + 4 * src_stride);  in postproc_down_across_chroma_msa()
    217  src = LD_UB(p_src + 5 * src_stride);  in postproc_down_across_chroma_msa()
    219  below1 = LD_UB(p_src + 6 * src_stride);  in postproc_down_across_chroma_msa()
    221  below2 = LD_UB(p_src + 7 * src_stride);  in postproc_down_across_chroma_msa()
    223  above2 = LD_UB(p_src + 8 * src_stride);  in postproc_down_across_chroma_msa()
    225  above1 = LD_UB(p_src + 9 * src_stride);  in postproc_down_across_chroma_msa()
    237  ref = LD_UB(f);  in postproc_down_across_chroma_msa()
    [all …]

sixtap_filter_msa.c
    150   mask0 = LD_UB(&vp8_mc_filt_mask_arr[16]);  in common_hz_6t_4x4_msa()
    177   mask0 = LD_UB(&vp8_mc_filt_mask_arr[16]);  in common_hz_6t_4x8_msa()
    227   mask0 = LD_UB(&vp8_mc_filt_mask_arr[0]);  in common_hz_6t_8w_msa()
    273   mask0 = LD_UB(&vp8_mc_filt_mask_arr[0]);  in common_hz_6t_16w_msa()
    489   mask0 = LD_UB(&vp8_mc_filt_mask_arr[16]);  in common_hv_6ht_6vt_4w_msa()
    564   mask0 = LD_UB(&vp8_mc_filt_mask_arr[0]);  in common_hv_6ht_6vt_8w_msa()
    972   mask0 = LD_UB(&vp8_mc_filt_mask_arr[16]);  in common_hv_4ht_4vt_4w_msa()
    1032  mask0 = LD_UB(&vp8_mc_filt_mask_arr[0]);  in common_hv_4ht_4vt_8w_msa()
    1115  mask0 = LD_UB(&vp8_mc_filt_mask_arr[16]);  in common_hv_6ht_4vt_4w_msa()

bilinear_filter_msa.c
    375  src0 = LD_UB(src);  in common_vt_2t_8x8mult_msa()
    433  src0 = LD_UB(src);  in common_vt_2t_16w_msa()

vp8_macros_msa.h
    20  #define LD_UB(...) LD_B(v16u8, __VA_ARGS__)  macro

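The vp8_macros_msa.h entry above shows that LD_UB is simply LD_B specialized to the unsigned-byte vector type v16u8. As a rough sketch only (the real libvpx macros also handle compiler and ISA-revision differences in how the load is issued), the pair and a typical call site could look like the following, assuming GCC's MSA intrinsics from <msa.h>; copy_16_bytes is a made-up helper for illustration:

    #include <stdint.h>
    #include <msa.h> /* MIPS MSA vector types and builtins (build with -mmsa) */

    /* Sketch of the LD_B/LD_UB pair: load 16 bytes from psrc and view them as
     * the requested vector type. Not the actual libvpx definition. */
    #define LD_B(RTYPE, psrc) ((RTYPE)__msa_ld_b((const void *)(psrc), 0))
    #define LD_UB(...) LD_B(v16u8, __VA_ARGS__)

    /* Hypothetical helper showing the call-site shape that recurs throughout
     * the listings in this section. */
    static void copy_16_bytes(const uint8_t *src, uint8_t *dst) {
      v16u8 vec = LD_UB(src);          /* load 16 unsigned bytes as one vector */
      __msa_st_b((v16i8)vec, dst, 0);  /* store the same 16 bytes to dst */
    }
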
/external/libvpx/libvpx/vp8/encoder/mips/msa/
denoising_msa.c
    74   mc_running_avg_y0 = LD_UB(mc_running_avg_y_ptr);  in vp8_denoiser_filter_msa()
    75   sig0 = LD_UB(sig_ptr);  in vp8_denoiser_filter_msa()
    79   mc_running_avg_y1 = LD_UB(mc_running_avg_y_ptr);  in vp8_denoiser_filter_msa()
    80   sig1 = LD_UB(sig_ptr);  in vp8_denoiser_filter_msa()
    208  running_avg_y = LD_UB(running_avg_y_ptr);  in vp8_denoiser_filter_msa()
    209  mc_running_avg_y0 = LD_UB(mc_running_avg_y_ptr);  in vp8_denoiser_filter_msa()
    210  sig0 = LD_UB(sig_ptr);  in vp8_denoiser_filter_msa()
    214  mc_running_avg_y1 = LD_UB(mc_running_avg_y_ptr);  in vp8_denoiser_filter_msa()
    215  sig1 = LD_UB(sig_ptr);  in vp8_denoiser_filter_msa()
    216  running_avg_y1 = LD_UB(running_avg_y_ptr);  in vp8_denoiser_filter_msa()
    [all …]

/external/libvpx/libvpx/vpx_dsp/mips/
variance_msa.c
    104  src = LD_UB(src_ptr);  in sse_diff_16width_msa()
    106  ref = LD_UB(ref_ptr);  in sse_diff_16width_msa()
    110  src = LD_UB(src_ptr);  in sse_diff_16width_msa()
    112  ref = LD_UB(ref_ptr);  in sse_diff_16width_msa()
    116  src = LD_UB(src_ptr);  in sse_diff_16width_msa()
    118  ref = LD_UB(ref_ptr);  in sse_diff_16width_msa()
    122  src = LD_UB(src_ptr);  in sse_diff_16width_msa()
    124  ref = LD_UB(ref_ptr);  in sse_diff_16width_msa()
    390  src = LD_UB(src_ptr);  in sse_16width_msa()
    392  ref = LD_UB(ref_ptr);  in sse_16width_msa()
    [all …]

idct16x16_msa.c
    387  dst0 = LD_UB(dst + 0 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    388  dst1 = LD_UB(dst + 15 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    404  dst8 = LD_UB(dst + 1 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    405  dst9 = LD_UB(dst + 14 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    419  dst4 = LD_UB(dst + 3 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    420  dst5 = LD_UB(dst + 12 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    431  dst12 = LD_UB(dst + 2 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    432  dst13 = LD_UB(dst + 13 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    444  dst6 = LD_UB(dst + 4 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    445  dst7 = LD_UB(dst + 11 * dst_stride);  in vpx_iadst16_1d_columns_addblk_msa()
    [all …]

sub_pixel_variance_msa.c
    60   pred = LD_UB(sec_pred);  in avg_sse_diff_4width_msa()
    128  pred = LD_UB(sec_pred);  in avg_sse_diff_16width_msa()
    130  src = LD_UB(src_ptr);  in avg_sse_diff_16width_msa()
    132  ref = LD_UB(ref_ptr);  in avg_sse_diff_16width_msa()
    137  pred = LD_UB(sec_pred);  in avg_sse_diff_16width_msa()
    139  src = LD_UB(src_ptr);  in avg_sse_diff_16width_msa()
    141  ref = LD_UB(ref_ptr);  in avg_sse_diff_16width_msa()
    146  pred = LD_UB(sec_pred);  in avg_sse_diff_16width_msa()
    148  src = LD_UB(src_ptr);  in avg_sse_diff_16width_msa()
    150  ref = LD_UB(ref_ptr);  in avg_sse_diff_16width_msa()
    [all …]

loopfilter_16_msa.c
    93   flat = LD_UB(filter48 + 96);  in vpx_hz_lpf_t16_16w()
    215  filter8 = LD_UB(filter48);  in vpx_hz_lpf_t16_16w()
    236  filter8 = LD_UB(filter48 + 16);  in vpx_hz_lpf_t16_16w()
    257  filter8 = LD_UB(filter48 + 32);  in vpx_hz_lpf_t16_16w()
    278  filter8 = LD_UB(filter48 + 48);  in vpx_hz_lpf_t16_16w()
    298  filter8 = LD_UB(filter48 + 64);  in vpx_hz_lpf_t16_16w()
    317  filter8 = LD_UB(filter48 + 80);  in vpx_hz_lpf_t16_16w()
    825  flat = LD_UB(filter48 + 6 * 16);  in vpx_vt_lpf_t16_8w()
    914  filter8 = LD_UB(filter48);  in vpx_vt_lpf_t16_8w()
    927  filter8 = LD_UB(filter48 + 16);  in vpx_vt_lpf_t16_8w()
    [all …]

sad_msa.c
    247  src = LD_UB(src_ptr);  in sad_16width_x3_msa()
    263  src = LD_UB(src_ptr);  in sad_16width_x3_msa()
    345  ref0_4 = LD_UB(ref + 64);  in sad_64width_x3_msa()
    542  src = LD_UB(src_ptr);  in sad_16width_x8_msa()
    578  src = LD_UB(src_ptr);  in sad_16width_x8_msa()
    925  src = LD_UB(src_ptr);  in sad_16width_x4d_msa()
    927  ref0 = LD_UB(ref0_ptr);  in sad_16width_x4d_msa()
    929  ref1 = LD_UB(ref1_ptr);  in sad_16width_x4d_msa()
    931  ref2 = LD_UB(ref2_ptr);  in sad_16width_x4d_msa()
    933  ref3 = LD_UB(ref3_ptr);  in sad_16width_x4d_msa()
    [all …]

intrapred_msa.c
    48   src0 = LD_UB(src);  in intra_predict_vert_16x16_msa()
    61   src1 = LD_UB(src);  in intra_predict_vert_32x32_msa()
    62   src2 = LD_UB(src + 16);  in intra_predict_vert_32x32_msa()
    270  top = LD_UB(src_top);  in intra_predict_dc_16x16_msa()
    271  left = LD_UB(src_left);  in intra_predict_dc_16x16_msa()
    293  data = LD_UB(src);  in intra_predict_dc_tl_16x16_msa()

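The intra_predict_vert_16x16_msa() hits above follow the usual vertical-prediction pattern: the 16 pixels directly above the block are loaded once with LD_UB and then written to every row of the destination. A minimal sketch under that assumption (the libvpx version unrolls the loop and uses its own store macros; vert_pred_16x16_sketch is a made-up name):

    #include <stdint.h>
    #include <msa.h>
    #include "vpx_dsp/mips/macros_msa.h" /* provides LD_UB */

    /* Sketch of 16x16 vertical intra prediction: replicate the row above the
     * block down all 16 destination rows. */
    static void vert_pred_16x16_sketch(const uint8_t *src_top, uint8_t *dst,
                                       int32_t dst_stride) {
      int32_t row;
      v16u8 top = LD_UB(src_top); /* the reconstructed row above the block */

      for (row = 0; row < 16; ++row) {
        __msa_st_b((v16i8)top, dst, 0); /* copy it into the current row */
        dst += dst_stride;
      }
    }
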
vpx_convolve8_horiz_msa.c
    22   mask0 = LD_UB(&mc_filt_mask_arr[16]);  in common_hz_8t_4x4_msa()
    51   mask0 = LD_UB(&mc_filt_mask_arr[16]);  in common_hz_8t_4x8_msa()
    97   mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hz_8t_8x4_msa()
    128  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hz_8t_8x8mult_msa()
    173  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hz_8t_16w_msa()
    211  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hz_8t_32w_msa()
    269  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hz_8t_64w_msa()

vpx_convolve8_avg_horiz_msa.c
    25   mask0 = LD_UB(&mc_filt_mask_arr[16]);  in common_hz_8t_and_aver_dst_4x4_msa()
    60   mask0 = LD_UB(&mc_filt_mask_arr[16]);  in common_hz_8t_and_aver_dst_4x8_msa()
    118  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hz_8t_and_aver_dst_8w_msa()
    158  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hz_8t_and_aver_dst_16w_msa()
    214  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hz_8t_and_aver_dst_32w_msa()
    271  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hz_8t_and_aver_dst_64w_msa()

vpx_convolve8_vert_msa.c
    403  src0 = LD_UB(src);  in common_vt_2t_8x8mult_msa()
    455  src0 = LD_UB(src);  in common_vt_2t_16w_msa()
    503  src0 = LD_UB(src);  in common_vt_2t_32w_msa()
    504  src5 = LD_UB(src + 16);  in common_vt_2t_32w_msa()

vpx_convolve8_msa.c
    36   mask0 = LD_UB(&mc_filt_mask_arr[16]);  in common_hv_8ht_8vt_4w_msa()
    111  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hv_8ht_8vt_8w_msa()

vpx_convolve8_avg_msa.c
    30   mask0 = LD_UB(&mc_filt_mask_arr[16]);  in common_hv_8ht_8vt_and_aver_dst_4w_msa()
    113  mask0 = LD_UB(&mc_filt_mask_arr[0]);  in common_hv_8ht_8vt_and_aver_dst_8w_msa()

vpx_convolve8_avg_vert_msa.c
    388  src0 = LD_UB(src);  in common_vt_2t_and_aver_dst_8x8mult_msa()
    447  src0 = LD_UB(src);  in common_vt_2t_and_aver_dst_16w_msa()

macros_msa.h
    20  #define LD_UB(...) LD_B(v16u8, __VA_ARGS__)  macro