/cts/tests/tests/media/common/jni/ |
D | codec-utils-jni.cpp |
    190  std::unique_ptr<NativeImage> img(new NativeImage);  in getNativeImage() local
    191  img->format = env->CallIntMethod(image, gFields.methodFormat);  in getNativeImage()
    192  img->width = env->CallIntMethod(image, gFields.methodWidth);  in getNativeImage()
    193  img->height = env->CallIntMethod(image, gFields.methodHeight);  in getNativeImage()
    194  img->timestamp = env->CallLongMethod(image, gFields.methodTimestamp);  in getNativeImage()
    202  img->crop.left = env->GetIntField(area, gFields.fieldLeft);  in getNativeImage()
    203  img->crop.top = env->GetIntField(area, gFields.fieldTop);  in getNativeImage()
    204  img->crop.right = env->GetIntField(area, gFields.fieldRight);  in getNativeImage()
    205  img->crop.bottom = env->GetIntField(area, gFields.fieldBottom);  in getNativeImage()
    206  if (img->crop.right == 0 && img->crop.bottom == 0) {  in getNativeImage()
    [all …]
|
/cts/hostsidetests/scopedstorage/redacturi/src/android/scopedstorage/cts/redacturi/ |
D | RedactUriDeviceTest.java |
    134  final File img = stageImageFileWithMetadata(IMAGE_FILE_NAME);  in testRedactedUri_single() local
    137  final Uri uri = MediaStore.scanFile(getContentResolver(), img);  in testRedactedUri_single()
    141  img.delete();  in testRedactedUri_single()
    172  final File img = stageImageFileWithMetadata(IMAGE_FILE_NAME);  in testQueryOnRedactionUri() local
    173  final Uri uri = MediaStore.scanFile(getContentResolver(), img);  in testQueryOnRedactionUri()
    241  img.delete();  in testQueryOnRedactionUri()
    251  final File img = stageImageFileWithMetadata(IMAGE_FILE_NAME);  in testSharedRedactedUri_openFdForWrite() local
    253  Uri redactedUri = shareAndGetRedactedUri(img, APP_B_NO_PERMS);  in testSharedRedactedUri_openFdForWrite()
    257  img.delete();  in testSharedRedactedUri_openFdForWrite()
    268  final File img = stageImageFileWithMetadata(IMAGE_FILE_NAME);  in testSharedRedactedUri_openFdForRead() local
    [all …]
|
/cts/apps/CameraITS/utils/ |
D | low_light_utils.py |
    54  def _crop(img):  argument
    66  data = img.reshape((-1, 3))
    87  mask = mask.reshape((img.shape[0], img.shape[1]))
    110  cropped_img = img[
    116  return img
    161  def _correct_image_rotation(img, regions):  argument
    199  img = cv2.rotate(img, cv2.ROTATE_90_CLOCKWISE)
    200  img = cv2.flip(img, 0)
    203  img = cv2.flip(img, -1)
    210  img = cv2.rotate(img, cv2.ROTATE_90_COUNTERCLOCKWISE)
    [all …]
|
D | image_processing_utils.py |
    125  img = convert_capture_to_rgb_image(
    127  write_image(img, f'{name_with_log_path}_scene.jpg', True)
    285  img = convert_raw_to_rgb_image(r, gr, gb, b, props, cap_raw['metadata'])
    286  return img
    355  def unpack_raw10_image(img):  argument
    367  if img.shape[1] % 5 != 0:
    369  w = img.shape[1] * 4 // 5
    370  h = img.shape[0]
    372  msbs = numpy.delete(img, numpy.s_[4::5], 1)
    377  lsbs = img[::, 4::5].reshape(h, w // 4)
    [all …]
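
Note: unpack_raw10_image() above follows the RAW10 layout used by Camera2, where every 5 bytes carry 4 pixels: 4 bytes of MSBs followed by 1 byte packing the four 2-bit LSBs. A minimal numpy sketch of that unpacking, assuming a uint8 byte image whose row width is a multiple of 5; the function name below is illustrative, not the utility's exact API.

import numpy as np

def unpack_raw10(packed):
  """Unpack a RAW10 byte image (H x 5W/4 uint8) into H x W uint16 pixels."""
  if packed.shape[1] % 5 != 0:
    raise ValueError('RAW10 row width must be a multiple of 5 bytes')
  h, w = packed.shape[0], packed.shape[1] * 4 // 5
  # Bytes 0..3 of each 5-byte group hold the 8 most-significant bits.
  msbs = np.delete(packed, np.s_[4::5], axis=1).astype(np.uint16)
  # Byte 4 packs the four 2-bit LSBs: pixel i's bits sit at positions 2i..2i+1.
  lsb_bytes = packed[:, 4::5].astype(np.uint16)
  lsbs = np.empty((h, w), dtype=np.uint16)
  for i in range(4):
    lsbs[:, i::4] = (lsb_bytes >> (2 * i)) & 0x3
  return (msbs << 2) | lsbs

Dividing the result by the 10-bit white level (1023) gives a float image in [0, 1].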
|
D | image_fov_utils.py |
    215  img = image_processing_utils.convert_capture_to_rgb_image(
    218  img = cv2.resize(img, (0, 0), fx=2.0, fy=2.0)
    229  k, opencv_dist, (img.shape[1], img.shape[0]), 0)[0]
    234  img = cv2.undistort(img, k, opencv_dist, None, k_new)
    236  img = cv2.undistort(img, k, opencv_dist)
    237  size = img.shape
    246  img = image_processing_utils.convert_capture_to_rgb_image(cap, props)
    253  image_processing_utils.write_image(img, img_name, True)
    256  img *= 255  # cv2 needs images between [0,255].
    258  img, img_name, CIRCLE_MIN_AREA, CIRCLE_COLOR)
    [all …]
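
Note: the cv2.undistort() calls above follow the usual OpenCV lens-correction pattern: a 3x3 camera matrix plus a distortion-coefficient vector, optionally refined with getOptimalNewCameraMatrix() for the target image size. A hedged sketch with placeholder intrinsics; none of these values come from a real device or from the test itself.

import cv2
import numpy as np

# Hypothetical intrinsics: focal lengths on the diagonal, principal point (cx, cy).
k = np.array([[1000.0, 0.0, 640.0],
              [0.0, 1000.0, 360.0],
              [0.0, 0.0, 1.0]])
dist = np.array([0.1, -0.05, 0.0, 0.0, 0.0])  # k1, k2, p1, p2, k3

img = np.full((720, 1280, 3), 128, dtype=np.uint8)  # stand-in for a captured frame
h, w = img.shape[:2]
# alpha=0 keeps only valid pixels; alpha=1 preserves the full field of view.
k_new, _ = cv2.getOptimalNewCameraMatrix(k, dist, (w, h), 0)
undistorted = cv2.undistort(img, k, dist, None, k_new)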
|
D | opencv_processing_utils.py |
    108  def convert_to_y(img, color_order='RGB'):  argument
    118  if img.dtype != 'uint8':
    121  y, _, _ = cv2.split(cv2.cvtColor(img, cv2.COLOR_RGB2YUV))
    123  y, _, _ = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2YUV))
    155  def find_opencv_faces(img, scale_factor, min_neighbors):  argument
    168  img_uint8 = image_processing_utils.convert_image_to_uint8(img)
    178  def find_all_contours(img):  argument
    181  _, contours, _ = cv2.findContours(img, cv2.RETR_TREE,
    184  contours, _ = cv2.findContours(img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    222  def scale_img(img, scale=1.0):  argument
    [all …]
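
Note: find_all_contours() above keeps two cv2.findContours() branches because OpenCV 3.x returns (image, contours, hierarchy) while 4.x returns (contours, hierarchy). A common version-agnostic alternative is sketched below; this is not the utility's exact implementation, just an equivalent idiom.

import cv2

def find_all_contours(binary_img):
  """Return contours regardless of whether OpenCV 3.x or 4.x is installed."""
  results = cv2.findContours(binary_img, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
  # The contours list is the second-to-last element of the returned tuple in
  # both the 3-value (3.x) and 2-value (4.x) forms.
  return results[-2]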
|
D | capture_read_noise_utils.py |
    187  def _generate_read_noise_stats(img, iso, white_level, cfa_order):  argument
    211  channel_img = image_processing_utils.subsample(img, num_channels)
    366  img = image_processing_utils.unpack_raw10_image(
    370  img = np.ndarray(
    373  img = img.astype(dtype=np.uint16).reshape(h, w)
    376  stats = _generate_read_noise_stats(img, iso_cap, white_level, cfa_order)
    380  np.mean(img), np.var(img), np.min(img), np.max(img))
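
Note: the read-noise statistics above are gathered per CFA channel of the raw mosaic. One way to split a Bayer image into its four planes and take mean/variance per plane is strided slicing; a sketch under that assumption, with illustrative names rather than the utility's own helpers.

import numpy as np

def bayer_channel_stats(raw, white_level):
  """Return (mean, variance) for each 2x2 CFA position, normalized to [0, 1]."""
  norm = raw.astype(np.float64) / white_level
  stats = []
  # The four CFA positions of a 2x2 Bayer tile, e.g. R, Gr, Gb, B for RGGB.
  for dy in (0, 1):
    for dx in (0, 1):
      plane = norm[dy::2, dx::2]
      stats.append((plane.mean(), plane.var()))
  return stats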
|
/cts/apps/CameraITS/tests/scene1_1/ |
D | test_crop_region_raw.py |
    114  img = image_processing_utils.convert_capture_to_rgb_image(cap,
    116  image_processing_utils.write_image(img, f'{name_with_log_path}_{s}.jpg')
    117  imgs[s] = img
    150  for s, img in imgs.items():
    151  h, w, _ = img.shape
    153  img = img.reshape(h//2, 2, w//2, 2, 3).mean(3).mean(1)
    154  img = img.reshape(h//2, w//2, 3)
    155  imgs2[s] = img
    171  for s, img in imgs2.items():
    173  img, f'{name_with_log_path}_comp_{s}.jpg')
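
Note: the reshape/mean pair at lines 153-154 is the standard numpy idiom for 2x2 box downsampling: fold each 2x2 block into axes of its own, then average those axes away. A standalone sketch, assuming an HxWx3 float image with even height and width.

import numpy as np

def downsample_2x2(img):
  """Average each 2x2 pixel block of an HxWx3 image into one output pixel."""
  h, w, _ = img.shape
  return img.reshape(h // 2, 2, w // 2, 2, 3).mean(axis=3).mean(axis=1)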
|
D | test_jpeg.py |
    36  def compute_img_means_and_save(img, fmt_name, log_path):  argument
    49  img, f'{name_with_log_path}_fmt={fmt_name}.jpg')
    51  img, _PATCH_X, _PATCH_Y, _PATCH_W, _PATCH_H)
    97  img = image_processing_utils.convert_capture_to_rgb_image(cap)
    98  rgb_means_yuv = compute_img_means_and_save(img, 'yuv', log_path)
    105  img = image_processing_utils.decompress_jpeg_to_rgb_image(cap['data'])
    106  rgb_means_jpg = compute_img_means_and_save(img, 'jpg', log_path)
|
D | test_linearity.py |
    98  img = image_processing_utils.convert_capture_to_rgb_image(cap)
    100  img, f'{name_with_log_path}_sens={int(sens):04d}.jpg')
    101  img = image_processing_utils.apply_lut_to_image(
    102  img, _INV_GAMMA_LUT[1::2] * _L)
    104  img, _PATCH_X, _PATCH_Y, _PATCH_W, _PATCH_H)
|
/cts/apps/CameraITS/tests/scene2_d/ |
D | test_autoframing.py |
    42  img, img_name, faces_cropped = self.get_image_data(cap, props, faces)
    45  img, faces_cropped, img_name)
    48  img, img_name, faces_cropped = self.get_image_data(cap, props, faces)
    50  img, _CV2_FACE_SCALE_FACTOR, _CV2_FACE_MIN_NEIGHBORS)
    52  faces_cropped, opencv_faces, img, img_name)
    55  img = image_processing_utils.convert_capture_to_rgb_image(
    61  faces, img, crop_region)
    63  return img, img_name, faces_cropped
|
/cts/hostsidetests/securitybulletin/securityPatch/CVE-2020-0034/ |
D | poc.cpp |
    96  vpx_image_t *img = nullptr;  in main() local
    97  while ((img = vpx_codec_get_frame(&codec, &iter)) != nullptr) {  in main()
    98  if (img->d_w > img->w || img->d_h > img->h) {  in main()
|
/cts/apps/CameraITS/tests/scene8/ |
D | test_ae_awb_regions.py |
    86  def _define_metering_regions(img, img_path, chart_path, props, width, height):  argument
    102  img, img_path)
    105  aruco_corners, aruco_ids, img, chart_path))
    219  img = image_processing_utils.convert_image_to_numpy_array(
    221  key_frames.append(img)
    248  def _get_red_blue_ratio(img):  argument
    256  img_means = image_processing_utils.compute_image_means(img)
    318  img = image_processing_utils.convert_capture_to_rgb_image(
    321  image_processing_utils.write_image(img, img_path)
    322  img = image_processing_utils.convert_image_to_uint8(img)
    [all …]
|
/cts/apps/CameraITS/tests/scene1_2/ |
D | test_yuv_jpeg_all.py |
    85  img = image_processing_utils.decompress_jpeg_to_rgb_image(cap['data'])
    89  img = image_processing_utils.convert_capture_to_rgb_image(cap)
    96  if img.shape[0] != size[1]:
    98  if img.shape[1] != size[0]:
    100  if img.shape[2] != 3:
    103  img, _PATCH_X, _PATCH_Y, _PATCH_W, _PATCH_H)
    107  return rgb, img
    218  for img in yuv_imgs:
    220  img,
    223  for img in jpg_imgs:
    [all …]
|
D | test_yuv_plus_raw.py |
    55  img = image_processing_utils.convert_capture_to_rgb_image(cap_yuv)
    57  img, f'{log_path_with_name}_shading={shading_mode}_yuv.jpg', True)
    59  img, _PATCH_X, _PATCH_Y, _PATCH_W, _PATCH_H)
    64  img = image_processing_utils.convert_raw_capture_to_rgb_image(
    68  img, f'{log_path_with_name}_shading={shading_mode}_{raw_fmt}.jpg', True)
    73  img, _PATCH_X, _PATCH_Y, _PATCH_W, _PATCH_H)
|
/cts/apps/CameraITS/tools/ |
D | check_alignment.py |
    52  img = image_processing_utils.convert_capture_to_rgb_image(cap, props)
    59  image_processing_utils.write_image(img, img_name, True)
    62  img *= 255  # cv2 needs images between [0,255]
    64  img, img_name, _CIRCLE_MIN_AREA, _CIRCLE_COLOR)
    65  opencv_processing_utils.append_circle_center_to_img(circle, img, img_name)
|
/cts/apps/CameraITS/tests/scene2_e/ |
D | test_continuous_picture.py |
    71  img = image_processing_utils.convert_capture_to_rgb_image(cap)
    73  img, _PATCH_X, _PATCH_Y, _PATCH_W, _PATCH_H)
    80  imgs.append(img)
    124  for i, img in enumerate(imgs):
    126  img, f'{os.path.join(self.log_path, _NAME)}_{i}.jpg')
|
D | test_num_faces.py |
    167  img = image_processing_utils.convert_capture_to_rgb_image(
    176  faces, img, crop_region)
    178  cv2.rectangle(img, (l, t), (r, b), _CV2_GREEN, 2)
    182  image_processing_utils.write_image(img, img_name)
    207  img, _CV2_FACE_SCALE_FACTOR, _CV2_FACE_MIN_NEIGHBORS)
    210  faces_cropped, faces_opencv, img, img_name)
|
/cts/apps/CameraITS/tests/scene0/ |
D | test_solid_color_test_pattern.py |
    73  def check_solid_color(img, exp_values, color, fmt):  argument
    99  rgb_means = [m*255 for m in image_processing_utils.compute_image_means(img)]
    102  image_processing_utils.compute_image_variances(img)]
    229  img = image_processing_utils.convert_capture_to_rgb_image(
    232  img = image_processing_utils.convert_capture_to_rgb_image(
    236  img,
    243  img, color['RGB'], color['color'], fmt['format'])
    246  img, _BLACK['RGB'], _BLACK['color'], fmt['format'])
|
D | test_test_patterns.py |
    83  img = image_processing_utils.convert_capture_to_rgb_image(cap, props=props)
    86  img = np.fliplr(img)
    88  tile = image_processing_utils.get_image_patch(img,
    150  img = image_processing_utils.convert_capture_to_rgb_image(
    154  img, f'{name_with_log_path}_{pattern}.jpg', True)
|
/cts/apps/CameraITS/tests/scene_extensions/scene_hdr/ |
D | test_hdr_extension.py |
    54  def extract_tile(img, file_stem_with_suffix):  argument
    63  img *= 255  # openCV needs [0:255] images
    65  img, _MIN_QRCODE_AREA)
    67  img,
    68  square['left']/img.shape[1],
    69  square['top']/img.shape[0],
    70  square['w']/img.shape[1],
    71  square['h']/img.shape[0]
    87  def analyze_qr_code(img, file_stem_with_suffix):  argument
    105  tile = extract_tile(img, file_stem_with_suffix)
|
/cts/tests/camera/src/android/hardware/camera2/cts/ |
D | MultiResolutionImageReaderTest.java |
    498  Image img = imgAndStreamInfo.image;  in validateImage() local
    501  " the expected width %d", img.getWidth(), streamInfoForImage.getWidth()),  in validateImage()
    502  img.getWidth(), streamInfoForImage.getWidth());  in validateImage()
    504  " the expected height %d", img.getHeight(), streamInfoForImage.getHeight()),  in validateImage()
    505  img.getHeight(), streamInfoForImage.getHeight());  in validateImage()
    508  CameraTestUtils.validateImage(img, img.getWidth(), img.getHeight(), format,  in validateImage()
    512  "expected format %d", img.getFormat(), format), format, img.getFormat());  in validateImage()
    521  WAIT_FOR_RESULT_TIMEOUT_MS, img.getTimestamp());  in validateImage()
    538  && streamInfo.getWidth() == img.getWidth()  in validateImage()
    539  && streamInfo.getHeight() == img.getHeight()) {  in validateImage()
    [all …]
|
/cts/apps/CameraITS/tests/scene9/ |
D | test_jpeg_high_entropy.py |
    123  img = image_processing_utils.convert_capture_to_rgb_image(
    128  logging.debug('cap size (pixels): %d', img.shape[1]*img.shape[0])
    130  img, f'{test_name_with_log_path}_{zoom_ratio:.2f}{_JPEG_EXTENSION}')
    133  img
|
/cts/apps/CameraITS/tests/scene2_c/ |
D | test_num_faces.py |
    167  img = image_processing_utils.convert_capture_to_rgb_image(
    176  faces, img, crop_region)
    178  cv2.rectangle(img, (l, t), (r, b), _CV2_GREEN, 2)
    182  image_processing_utils.write_image(img, img_name)
    207  img, _CV2_FACE_SCALE_FACTOR, _CV2_FACE_MIN_NEIGHBORS)
    210  faces_cropped, faces_opencv, img, img_name)
|
/cts/apps/CameraITS/tests/scene2_f/ |
D | test_num_faces.py |
    167  img = image_processing_utils.convert_capture_to_rgb_image(
    176  faces, img, crop_region)
    178  cv2.rectangle(img, (l, t), (r, b), _CV2_GREEN, 2)
    182  image_processing_utils.write_image(img, img_name)
    207  img, _CV2_FACE_SCALE_FACTOR, _CV2_FACE_MIN_NEIGHBORS)
    210  faces_cropped, faces_opencv, img, img_name)
|