/* Copyright 2017 The Chromium OS Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "arc/image_processor.h"

#include <errno.h>
#include <libyuv.h>
#include <time.h>

#include "arc/common.h"
#include "arc/common_types.h"
#include "arc/exif_utils.h"
#include "arc/jpeg_compressor.h"

namespace arc {

using android::CameraMetadata;

/*
 * Formats have different names in different header files. Here is the mapping
 * table:
 *
 * android_pixel_format_t          videodev2.h           FOURCC in libyuv
 * -----------------------------------------------------------------------------
 * HAL_PIXEL_FORMAT_YV12         = V4L2_PIX_FMT_YVU420 = FOURCC_YV12
 * HAL_PIXEL_FORMAT_YCrCb_420_SP = V4L2_PIX_FMT_NV21   = FOURCC_NV21
 * HAL_PIXEL_FORMAT_RGBA_8888    = V4L2_PIX_FMT_RGB32  = FOURCC_BGR4
 * HAL_PIXEL_FORMAT_YCbCr_422_I  = V4L2_PIX_FMT_YUYV   = FOURCC_YUYV
 *                                                     = FOURCC_YUY2
 *                                 V4L2_PIX_FMT_YUV420 = FOURCC_I420
 *                                                     = FOURCC_YU12
 *                                 V4L2_PIX_FMT_MJPEG  = FOURCC_MJPG
 *
 * The camera device generates FOURCC_YUYV and FOURCC_MJPG.
 * Preview needs FOURCC_ARGB format.
 * The software video encoder needs FOURCC_YU12.
 * CTS requires FOURCC_YV12 and FOURCC_NV21 for applications.
 *
 * Android stride requirement:
 * The YV12 horizontal stride should be a multiple of 16 pixels. See
 * android.graphics.ImageFormat.YV12.
 * The strides of ARGB, YU12, and NV21 are always equal to the width.
 *
 * Conversion path:
 * MJPG/YUYV (from camera) -> YU12 -> ARGB (preview)
 *                                 -> NV21 (apps)
 *                                 -> YV12 (apps)
 *                                 -> YU12 (video encoder)
 */
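
// Illustrative sketch of the conversion path above (not part of this file's
// API): the camera frame is first converted to a YU12 intermediate, which is
// then converted to whatever each consumer needs. Buffer allocation, metadata
// setup, and error handling are omitted, and the frame variables shown are
// assumed to be provided by the caller.
//
//   // MJPG/YUYV from the camera -> YU12 intermediate.
//   ImageProcessor::ConvertFormat(metadata, camera_frame, &yu12_frame);
//   // YU12 -> RGB32 for preview and NV21 for applications.
//   ImageProcessor::ConvertFormat(metadata, yu12_frame, &preview_frame);
//   ImageProcessor::ConvertFormat(metadata, yu12_frame, &nv21_frame);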

// The YV12 horizontal stride should be a multiple of 16 pixels for each plane.
// |dst_stride_uv| is the pixel stride of the u or v plane.
static int YU12ToYV12(const void* yu12, void* yv12, int width, int height,
                      int dst_stride_y, int dst_stride_uv);
static int YU12ToNV21(const void* yu12, void* nv21, int width, int height);
static bool ConvertToJpeg(const CameraMetadata& metadata,
                          const FrameBuffer& in_frame, FrameBuffer* out_frame);
static bool SetExifTags(const CameraMetadata& metadata, ExifUtils* utils);

// How precise the float-to-rational conversion for EXIF tags should be.
static const int kRationalPrecision = 10000;
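// For example, with this precision a focal length of 4.6 mm is encoded as the
// rational 46000/10000 (see SetExifTags() below).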

// Default JPEG quality settings.
static const int DEFAULT_JPEG_QUALITY = 80;

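// Rounds |value| up to the next multiple of 16, e.g. Align16(32) == 32 and
// Align16(33) == 48: adding 15 and clearing the low four bits performs the
// round-up.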
inline static size_t Align16(size_t value) { return (value + 15) & ~15; }

size_t ImageProcessor::GetConvertedSize(int fourcc, uint32_t width,
                                        uint32_t height) {
  if ((width % 2) || (height % 2)) {
    LOGF(ERROR) << "Width or height is not even (" << width << " x " << height
                << ")";
    return 0;
  }

  switch (fourcc) {
    case V4L2_PIX_FMT_YVU420:  // YV12
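      // Each of the two chroma planes has height/2 rows with a 16-pixel-
      // aligned half-width stride, so the chroma contribution is
      // Align16(width / 2) * (height / 2) * 2 == Align16(width / 2) * height.
      // For a 640x480 frame this yields 640 * 480 + 320 * 480 = 460800 bytes.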
      return Align16(width) * height + Align16(width / 2) * height;
    case V4L2_PIX_FMT_YUV420:  // YU12
    // Fall-through.
    case V4L2_PIX_FMT_NV21:  // NV21
      return width * height * 3 / 2;
    case V4L2_PIX_FMT_BGR32:
    case V4L2_PIX_FMT_RGB32:
      return width * height * 4;
    default:
      LOGF(ERROR) << "Pixel format " << FormatToString(fourcc)
                  << " is unsupported.";
      return 0;
  }
}

bool ImageProcessor::SupportsConversion(uint32_t from_fourcc,
                                        uint32_t to_fourcc) {
  switch (from_fourcc) {
    case V4L2_PIX_FMT_YUYV:
      return (to_fourcc == V4L2_PIX_FMT_YUV420);
    case V4L2_PIX_FMT_YUV420:
      return (
          to_fourcc == V4L2_PIX_FMT_YUV420 ||
          to_fourcc == V4L2_PIX_FMT_YVU420 || to_fourcc == V4L2_PIX_FMT_NV21 ||
          to_fourcc == V4L2_PIX_FMT_RGB32 || to_fourcc == V4L2_PIX_FMT_BGR32 ||
          to_fourcc == V4L2_PIX_FMT_JPEG);
    case V4L2_PIX_FMT_MJPEG:
      return (to_fourcc == V4L2_PIX_FMT_YUV420);
    default:
      return false;
  }
}

int ImageProcessor::ConvertFormat(const CameraMetadata& metadata,
                                  const FrameBuffer& in_frame,
                                  FrameBuffer* out_frame) {
  if ((in_frame.GetWidth() % 2) || (in_frame.GetHeight() % 2)) {
    LOGF(ERROR) << "Width or height is not even (" << in_frame.GetWidth()
                << " x " << in_frame.GetHeight() << ")";
    return -EINVAL;
  }

  size_t data_size = GetConvertedSize(
      out_frame->GetFourcc(), in_frame.GetWidth(), in_frame.GetHeight());

  if (out_frame->SetDataSize(data_size)) {
    LOGF(ERROR) << "Set data size failed";
    return -EINVAL;
  }

  if (in_frame.GetFourcc() == V4L2_PIX_FMT_YUYV) {
    switch (out_frame->GetFourcc()) {
      case V4L2_PIX_FMT_YUV420:  // YU12
      {
        int res = libyuv::YUY2ToI420(
            in_frame.GetData(),      /* src_yuy2 */
            in_frame.GetWidth() * 2, /* src_stride_yuy2 */
            out_frame->GetData(),    /* dst_y */
            out_frame->GetWidth(),   /* dst_stride_y */
            out_frame->GetData() +
                out_frame->GetWidth() * out_frame->GetHeight(), /* dst_u */
            out_frame->GetWidth() / 2, /* dst_stride_u */
            out_frame->GetData() + out_frame->GetWidth() *
                                       out_frame->GetHeight() * 5 /
                                       4,  /* dst_v */
            out_frame->GetWidth() / 2,     /* dst_stride_v */
            in_frame.GetWidth(), in_frame.GetHeight());
        LOGF_IF(ERROR, res) << "YUY2ToI420() for YU12 returns " << res;
        return res ? -EINVAL : 0;
      }
      default:
        LOGF(ERROR) << "Destination pixel format "
                    << FormatToString(out_frame->GetFourcc())
                    << " is unsupported for YUYV source format.";
        return -EINVAL;
    }
  } else if (in_frame.GetFourcc() == V4L2_PIX_FMT_YUV420) {
    // V4L2_PIX_FMT_YVU420 is YV12. I420 is usually referred to as YU12
    // (V4L2_PIX_FMT_YUV420), and YV12 is the same as YU12 except that the U
    // and V planes are swapped.
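    // Plane layout of a width x height frame in each 4:2:0 format handled
    // below (offsets in pixels; a reference sketch derived from the pointer
    // arithmetic in this file):
    //   YU12: Y at 0, U at w*h, V at w*h*5/4, each chroma plane w/2 wide.
    //   YV12: Y at 0, then V, then U, with 16-pixel-aligned strides.
    //   NV21: Y at 0, then a single interleaved VU plane starting at w*h.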
    switch (out_frame->GetFourcc()) {
      case V4L2_PIX_FMT_YVU420:  // YV12
      {
        int ystride = Align16(in_frame.GetWidth());
        int uvstride = Align16(in_frame.GetWidth() / 2);
        int res = YU12ToYV12(in_frame.GetData(), out_frame->GetData(),
                             in_frame.GetWidth(), in_frame.GetHeight(), ystride,
                             uvstride);
        LOGF_IF(ERROR, res) << "YU12ToYV12() returns " << res;
        return res ? -EINVAL : 0;
      }
      case V4L2_PIX_FMT_YUV420:  // YU12
      {
        memcpy(out_frame->GetData(), in_frame.GetData(),
               in_frame.GetDataSize());
        return 0;
      }
      case V4L2_PIX_FMT_NV21:  // NV21
      {
        // TODO(henryhsu): Use libyuv::I420ToNV21.
        int res = YU12ToNV21(in_frame.GetData(), out_frame->GetData(),
                             in_frame.GetWidth(), in_frame.GetHeight());
        LOGF_IF(ERROR, res) << "YU12ToNV21() returns " << res;
        return res ? -EINVAL : 0;
      }
      case V4L2_PIX_FMT_BGR32: {
        int res = libyuv::I420ToABGR(
            in_frame.GetData(),  /* src_y */
            in_frame.GetWidth(), /* src_stride_y */
            in_frame.GetData() +
                in_frame.GetWidth() * in_frame.GetHeight(), /* src_u */
            in_frame.GetWidth() / 2, /* src_stride_u */
            in_frame.GetData() +
                in_frame.GetWidth() * in_frame.GetHeight() * 5 / 4, /* src_v */
            in_frame.GetWidth() / 2,   /* src_stride_v */
            out_frame->GetData(),      /* dst_abgr */
            out_frame->GetWidth() * 4, /* dst_stride_abgr */
            in_frame.GetWidth(), in_frame.GetHeight());
        LOGF_IF(ERROR, res) << "I420ToABGR() returns " << res;
        return res ? -EINVAL : 0;
      }
      case V4L2_PIX_FMT_RGB32: {
        int res = libyuv::I420ToARGB(
            in_frame.GetData(),  /* src_y */
            in_frame.GetWidth(), /* src_stride_y */
            in_frame.GetData() +
                in_frame.GetWidth() * in_frame.GetHeight(), /* src_u */
            in_frame.GetWidth() / 2, /* src_stride_u */
            in_frame.GetData() +
                in_frame.GetWidth() * in_frame.GetHeight() * 5 / 4, /* src_v */
            in_frame.GetWidth() / 2,   /* src_stride_v */
            out_frame->GetData(),      /* dst_argb */
            out_frame->GetWidth() * 4, /* dst_stride_argb */
            in_frame.GetWidth(), in_frame.GetHeight());
        LOGF_IF(ERROR, res) << "I420ToARGB() returns " << res;
        return res ? -EINVAL : 0;
      }
      case V4L2_PIX_FMT_JPEG: {
        bool res = ConvertToJpeg(metadata, in_frame, out_frame);
        LOGF_IF(ERROR, !res) << "ConvertToJpeg() failed.";
        return res ? 0 : -EINVAL;
      }
      default:
        LOGF(ERROR) << "Destination pixel format "
                    << FormatToString(out_frame->GetFourcc())
                    << " is unsupported for YU12 source format.";
        return -EINVAL;
    }
  } else if (in_frame.GetFourcc() == V4L2_PIX_FMT_MJPEG) {
    switch (out_frame->GetFourcc()) {
      case V4L2_PIX_FMT_YUV420:  // YU12
      {
        int res = libyuv::MJPGToI420(
            in_frame.GetData(),     /* sample */
            in_frame.GetDataSize(), /* sample_size */
            out_frame->GetData(),   /* dst_y */
            out_frame->GetWidth(),  /* dst_stride_y */
            out_frame->GetData() +
                out_frame->GetWidth() * out_frame->GetHeight(), /* dst_u */
            out_frame->GetWidth() / 2, /* dst_stride_u */
            out_frame->GetData() + out_frame->GetWidth() *
                                       out_frame->GetHeight() * 5 /
                                       4,  /* dst_v */
            out_frame->GetWidth() / 2,     /* dst_stride_v */
            in_frame.GetWidth(), in_frame.GetHeight(), out_frame->GetWidth(),
            out_frame->GetHeight());
        LOGF_IF(ERROR, res) << "MJPGToI420() returns " << res;
        return res ? -EINVAL : 0;
      }
      default:
        LOGF(ERROR) << "Destination pixel format "
                    << FormatToString(out_frame->GetFourcc())
                    << " is unsupported for MJPEG source format.";
        return -EINVAL;
    }
  } else {
    LOGF(ERROR) << "Convert format doesn't support source format "
                << FormatToString(in_frame.GetFourcc());
    return -EINVAL;
  }
}

int ImageProcessor::Scale(const FrameBuffer& in_frame, FrameBuffer* out_frame) {
  if (in_frame.GetFourcc() != V4L2_PIX_FMT_YUV420) {
    LOGF(ERROR) << "Pixel format " << FormatToString(in_frame.GetFourcc())
                << " is unsupported.";
    return -EINVAL;
  }

  size_t data_size = GetConvertedSize(
      in_frame.GetFourcc(), out_frame->GetWidth(), out_frame->GetHeight());

  if (out_frame->SetDataSize(data_size)) {
    LOGF(ERROR) << "Set data size failed";
    return -EINVAL;
  }
  out_frame->SetFourcc(in_frame.GetFourcc());

  VLOGF(1) << "Scale image from " << in_frame.GetWidth() << "x"
           << in_frame.GetHeight() << " to " << out_frame->GetWidth() << "x"
           << out_frame->GetHeight();

  int ret = libyuv::I420Scale(
      in_frame.GetData(), in_frame.GetWidth(),
      in_frame.GetData() + in_frame.GetWidth() * in_frame.GetHeight(),
      in_frame.GetWidth() / 2,
      in_frame.GetData() + in_frame.GetWidth() * in_frame.GetHeight() * 5 / 4,
      in_frame.GetWidth() / 2, in_frame.GetWidth(), in_frame.GetHeight(),
      out_frame->GetData(), out_frame->GetWidth(),
      out_frame->GetData() + out_frame->GetWidth() * out_frame->GetHeight(),
      out_frame->GetWidth() / 2,
      out_frame->GetData() +
          out_frame->GetWidth() * out_frame->GetHeight() * 5 / 4,
      out_frame->GetWidth() / 2, out_frame->GetWidth(), out_frame->GetHeight(),
      libyuv::FilterMode::kFilterNone);
  LOGF_IF(ERROR, ret) << "I420Scale failed: " << ret;
  return ret;
}

static int YU12ToYV12(const void* yu12, void* yv12, int width, int height,
                      int dst_stride_y, int dst_stride_uv) {
  if ((width % 2) || (height % 2)) {
    LOGF(ERROR) << "Width or height is not even (" << width << " x " << height
                << ")";
    return -EINVAL;
  }
  if (dst_stride_y < width || dst_stride_uv < width / 2) {
    LOGF(ERROR) << "Y plane stride (" << dst_stride_y
                << ") or U/V plane stride (" << dst_stride_uv
                << ") is invalid for width " << width;
    return -EINVAL;
  }

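  // In the YV12 destination the V plane directly follows the Y plane and the
  // U plane follows the V plane, so the copy below simply swaps the chroma
  // destinations while honoring the 16-pixel-aligned strides.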
  const uint8_t* src = reinterpret_cast<const uint8_t*>(yu12);
  uint8_t* dst = reinterpret_cast<uint8_t*>(yv12);
  const uint8_t* u_src = src + width * height;
  uint8_t* u_dst = dst + dst_stride_y * height + dst_stride_uv * height / 2;
  const uint8_t* v_src = src + width * height * 5 / 4;
  uint8_t* v_dst = dst + dst_stride_y * height;

  return libyuv::I420Copy(src, width, u_src, width / 2, v_src, width / 2, dst,
                          dst_stride_y, u_dst, dst_stride_uv, v_dst,
                          dst_stride_uv, width, height);
}

static int YU12ToNV21(const void* yu12, void* nv21, int width, int height) {
  if ((width % 2) || (height % 2)) {
    LOGF(ERROR) << "Width or height is not even (" << width << " x " << height
                << ")";
    return -EINVAL;
  }

  const uint8_t* src = reinterpret_cast<const uint8_t*>(yu12);
  uint8_t* dst = reinterpret_cast<uint8_t*>(nv21);
  const uint8_t* u_src = src + width * height;
  const uint8_t* v_src = src + width * height * 5 / 4;
  uint8_t* vu_dst = dst + width * height;

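  // Copy the Y plane verbatim, then interleave the planar U/V source into the
  // single VU plane that NV21 expects (V sample first in each pair).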
  memcpy(dst, src, width * height);

  for (int i = 0; i < height / 2; i++) {
    for (int j = 0; j < width / 2; j++) {
      *vu_dst++ = *v_src++;
      *vu_dst++ = *u_src++;
    }
  }
  return 0;
}

static bool ConvertToJpeg(const CameraMetadata& metadata,
                          const FrameBuffer& in_frame, FrameBuffer* out_frame) {
  ExifUtils utils;
  int jpeg_quality, thumbnail_jpeg_quality;
  camera_metadata_ro_entry entry;

  if (metadata.exists(ANDROID_JPEG_QUALITY)) {
    entry = metadata.find(ANDROID_JPEG_QUALITY);
    jpeg_quality = entry.data.u8[0];
  } else {
    LOGF(ERROR) << "Could not find jpeg quality in metadata, defaulting to "
                << DEFAULT_JPEG_QUALITY;
    jpeg_quality = DEFAULT_JPEG_QUALITY;
  }
  if (metadata.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
    entry = metadata.find(ANDROID_JPEG_THUMBNAIL_QUALITY);
    thumbnail_jpeg_quality = entry.data.u8[0];
  } else {
    thumbnail_jpeg_quality = jpeg_quality;
  }

  if (!utils.Initialize(in_frame.GetData(), in_frame.GetWidth(),
                        in_frame.GetHeight(), thumbnail_jpeg_quality)) {
    LOGF(ERROR) << "ExifUtils initialization failed.";
    return false;
  }
  if (!SetExifTags(metadata, &utils)) {
    LOGF(ERROR) << "Setting Exif tags failed.";
    return false;
  }
  if (!utils.GenerateApp1()) {
    LOGF(ERROR) << "Generating APP1 segment failed.";
    return false;
  }
  JpegCompressor compressor;
  if (!compressor.CompressImage(in_frame.GetData(), in_frame.GetWidth(),
                                in_frame.GetHeight(), jpeg_quality,
                                utils.GetApp1Buffer(), utils.GetApp1Length())) {
    LOGF(ERROR) << "JPEG image compression failed";
    return false;
  }
  size_t buffer_length = compressor.GetCompressedImageSize();
  memcpy(out_frame->GetData(), compressor.GetCompressedImagePtr(),
         buffer_length);
  return true;
}

static bool SetExifTags(const CameraMetadata& metadata, ExifUtils* utils) {
  time_t raw_time = 0;
  struct tm time_info;
  bool time_available = time(&raw_time) != -1;
  localtime_r(&raw_time, &time_info);
  if (!utils->SetDateTime(time_info)) {
    LOGF(ERROR) << "Setting date time failed.";
    return false;
  }

  float focal_length;
  camera_metadata_ro_entry entry = metadata.find(ANDROID_LENS_FOCAL_LENGTH);
  if (entry.count) {
    focal_length = entry.data.f[0];
  } else {
    LOGF(ERROR) << "Cannot find focal length in metadata.";
    return false;
  }
  if (!utils->SetFocalLength(
          static_cast<uint32_t>(focal_length * kRationalPrecision),
          kRationalPrecision)) {
    LOGF(ERROR) << "Setting focal length failed.";
    return false;
  }

  if (metadata.exists(ANDROID_JPEG_GPS_COORDINATES)) {
    entry = metadata.find(ANDROID_JPEG_GPS_COORDINATES);
    if (entry.count < 3) {
      LOGF(ERROR) << "Gps coordinates in metadata are not complete.";
      return false;
    }
    if (!utils->SetGpsLatitude(entry.data.d[0])) {
      LOGF(ERROR) << "Setting gps latitude failed.";
      return false;
    }
    if (!utils->SetGpsLongitude(entry.data.d[1])) {
      LOGF(ERROR) << "Setting gps longitude failed.";
      return false;
    }
    if (!utils->SetGpsAltitude(entry.data.d[2])) {
      LOGF(ERROR) << "Setting gps altitude failed.";
      return false;
    }
  }

  if (metadata.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
    entry = metadata.find(ANDROID_JPEG_GPS_PROCESSING_METHOD);
    std::string method_str(reinterpret_cast<const char*>(entry.data.u8));
    if (!utils->SetGpsProcessingMethod(method_str)) {
      LOGF(ERROR) << "Setting gps processing method failed.";
      return false;
    }
  }

  if (time_available && metadata.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
    entry = metadata.find(ANDROID_JPEG_GPS_TIMESTAMP);
    time_t timestamp = static_cast<time_t>(entry.data.i64[0]);
    if (gmtime_r(&timestamp, &time_info)) {
      if (!utils->SetGpsTimestamp(time_info)) {
        LOGF(ERROR) << "Setting gps timestamp failed.";
        return false;
      }
    } else {
      LOGF(ERROR) << "Time transformation failed.";
      return false;
    }
  }

  if (metadata.exists(ANDROID_JPEG_ORIENTATION)) {
    entry = metadata.find(ANDROID_JPEG_ORIENTATION);
    if (!utils->SetOrientation(entry.data.i32[0])) {
      LOGF(ERROR) << "Setting orientation failed.";
      return false;
    }
  }

  if (metadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
    entry = metadata.find(ANDROID_JPEG_THUMBNAIL_SIZE);
    if (entry.count < 2) {
      LOGF(ERROR) << "Thumbnail size in metadata is not complete.";
      return false;
    }
    int thumbnail_width = entry.data.i32[0];
    int thumbnail_height = entry.data.i32[1];
    if (thumbnail_width > 0 && thumbnail_height > 0) {
      if (!utils->SetThumbnailSize(static_cast<uint16_t>(thumbnail_width),
                                   static_cast<uint16_t>(thumbnail_height))) {
        LOGF(ERROR) << "Setting thumbnail size failed.";
        return false;
      }
    }
  }
  return true;
}

}  // namespace arc