/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/*
 * Contains the implementation of the EmulatedFakeCamera3 class, which
 * encapsulates the functionality of an advanced fake camera.
 */

#include <inttypes.h>

//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0
#define LOG_TAG "EmulatedCamera_FakeCamera3"
#include <cutils/properties.h>
#include <utils/Log.h>

#include "EmulatedFakeCamera3.h"
#include "EmulatedCameraFactory.h"
#include <ui/Fence.h>
#include <ui/Rect.h>
#include <ui/GraphicBufferMapper.h>
#include "gralloc_cb.h"

#include "fake-pipeline2/Sensor.h"
#include "fake-pipeline2/JpegCompressor.h"
#include <cmath>

#include <vector>

#if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
#define ALOGVV ALOGV
#else
#define ALOGVV(...) ((void)0)
#endif

namespace android {

/**
 * Constants for camera capabilities
 */

const int64_t USEC = 1000LL;
const int64_t MSEC = USEC * 1000LL;
const int64_t SEC = MSEC * 1000LL;
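// Note: these scale factors are in nanoseconds (nsecs_t units), so USEC, MSEC
// and SEC are the number of nanoseconds in one microsecond, millisecond and
// second respectively.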

const int32_t EmulatedFakeCamera3::kAvailableFormats[] = {
        HAL_PIXEL_FORMAT_RAW16,
        HAL_PIXEL_FORMAT_BLOB,
        HAL_PIXEL_FORMAT_RGBA_8888,
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
        // These are handled by YCbCr_420_888
        //        HAL_PIXEL_FORMAT_YV12,
        //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
        HAL_PIXEL_FORMAT_YCbCr_420_888,
        HAL_PIXEL_FORMAT_Y16
};

/**
 * 3A constants
 */

// Default exposure and gain targets for different scenarios
const nsecs_t EmulatedFakeCamera3::kNormalExposureTime       = 10 * MSEC;
const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
const int     EmulatedFakeCamera3::kNormalSensitivity        = 100;
const int     EmulatedFakeCamera3::kFacePrioritySensitivity  = 400;
const float   EmulatedFakeCamera3::kExposureTrackRate        = 0.1;
const int     EmulatedFakeCamera3::kPrecaptureMinFrames      = 10;
const int     EmulatedFakeCamera3::kStableAeMaxFrames        = 100;
const float   EmulatedFakeCamera3::kExposureWanderMin        = -2;
const float   EmulatedFakeCamera3::kExposureWanderMax        = 1;
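// The remaining constants parameterize the fake 3A loop: roughly, how quickly
// the simulated exposure tracks its target, how many frames the precapture and
// "stable AE" phases last, and how far (in stops) the simulated scene exposure
// is allowed to wander.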

/**
 * Camera device lifecycle methods
 */

EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack,
        struct hw_module_t* module) :
        EmulatedCamera3(cameraId, module),
        mFacingBack(facingBack) {
    ALOGI("Constructing emulated fake camera 3: ID %d, facing %s",
            mCameraID, facingBack ? "back" : "front");

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
        mDefaultTemplates[i] = NULL;
    }

}

EmulatedFakeCamera3::~EmulatedFakeCamera3() {
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
        if (mDefaultTemplates[i] != NULL) {
            free_camera_metadata(mDefaultTemplates[i]);
        }
    }
}

status_t EmulatedFakeCamera3::Initialize() {
    ALOGV("%s: E", __FUNCTION__);
    status_t res;

    if (mStatus != STATUS_ERROR) {
        ALOGE("%s: Already initialized!", __FUNCTION__);
        return INVALID_OPERATION;
    }

    res = getCameraCapabilities();
    if (res != OK) {
        ALOGE("%s: Unable to get camera capabilities: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    res = constructStaticInfo();
    if (res != OK) {
        ALOGE("%s: Unable to allocate static info: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    return EmulatedCamera3::Initialize();
}

status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock l(mLock);
    status_t res;

    if (mStatus != STATUS_CLOSED) {
        ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
        return INVALID_OPERATION;
    }

    mSensor = new Sensor();
    mSensor->setSensorListener(this);
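    // The camera registers itself as the sensor's listener so that sensor
    // events (e.g. exposure-start notifications) are delivered back to this
    // class.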

    res = mSensor->startUp();
    if (res != NO_ERROR) return res;

    mReadoutThread = new ReadoutThread(this);
    mJpegCompressor = new JpegCompressor();

    res = mReadoutThread->run("EmuCam3::readoutThread");
    if (res != NO_ERROR) return res;

    // Initialize fake 3A

    mControlMode  = ANDROID_CONTROL_MODE_AUTO;
    mFacePriority = false;
    mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
    mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
    mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
    mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
    mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
    mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    mAeCounter    = 0;
    mAeTargetExposureTime = kNormalExposureTime;
    mAeCurrentExposureTime = kNormalExposureTime;
    mAeCurrentSensitivity  = kNormalSensitivity;

    return EmulatedCamera3::connectCamera(device);
}

status_t EmulatedFakeCamera3::closeCamera() {
    ALOGV("%s: E", __FUNCTION__);
    status_t res;
    {
        Mutex::Autolock l(mLock);
        if (mStatus == STATUS_CLOSED) return OK;

        res = mSensor->shutDown();
        if (res != NO_ERROR) {
            ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
            return res;
        }
        mSensor.clear();

        mReadoutThread->requestExit();
    }
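    // Join the readout thread outside of mLock so it can finish any in-flight
    // work (which may itself need the lock) without deadlocking.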

    mReadoutThread->join();

    {
        Mutex::Autolock l(mLock);
        // Clear out private stream information
        for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>((*s)->priv);
            delete privStream;
            (*s)->priv = NULL;
        }
        mStreams.clear();
        mReadoutThread.clear();
    }

    return EmulatedCamera3::closeCamera();
}

status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
    info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
    info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
    return EmulatedCamera3::getCameraInfo(info);
}

/**
 * Camera3 interface methods
 */

status_t EmulatedFakeCamera3::configureStreams(
        camera3_stream_configuration *streamList) {
    Mutex::Autolock l(mLock);
    ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);

    if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
        ALOGE("%s: Cannot configure streams in state %d",
                __FUNCTION__, mStatus);
        return NO_INIT;
    }

    /**
     * Sanity-check input list.
     */
    if (streamList == NULL) {
        ALOGE("%s: NULL stream configuration", __FUNCTION__);
        return BAD_VALUE;
    }

    if (streamList->streams == NULL) {
        ALOGE("%s: NULL stream list", __FUNCTION__);
        return BAD_VALUE;
    }

    if (streamList->num_streams < 1) {
        ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
                streamList->num_streams);
        return BAD_VALUE;
    }

    camera3_stream_t *inputStream = NULL;
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];

        if (newStream == NULL) {
            ALOGE("%s: Stream index %zu was NULL",
                  __FUNCTION__, i);
            return BAD_VALUE;
        }

        ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
                __FUNCTION__, newStream, i, newStream->stream_type,
                newStream->usage,
                newStream->format);

        if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
            newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
            if (inputStream != NULL) {

                ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
                return BAD_VALUE;
            }
            inputStream = newStream;
        }

        bool validFormat = false;
        for (size_t f = 0;
             f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
             f++) {
            if (newStream->format == kAvailableFormats[f]) {
                validFormat = true;
                break;
            }
        }
        if (!validFormat) {
            ALOGE("%s: Unsupported stream format 0x%x requested",
                    __FUNCTION__, newStream->format);
            return BAD_VALUE;
        }
    }
    mInputStream = inputStream;
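    // At most one input (or bidirectional) stream is allowed; remember it so
    // processCaptureRequest() can validate incoming reprocess buffers against
    // it.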

    /**
     * Initially mark all existing streams as not alive
     */
    for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
        PrivateStreamInfo *privStream =
                static_cast<PrivateStreamInfo*>((*s)->priv);
        privStream->alive = false;
    }

    /**
     * Find new streams and mark still-alive ones
     */
    for (size_t i = 0; i < streamList->num_streams; i++) {
        camera3_stream_t *newStream = streamList->streams[i];
        if (newStream->priv == NULL) {
            // New stream, construct info
            PrivateStreamInfo *privStream = new PrivateStreamInfo();
            privStream->alive = true;

            newStream->max_buffers = kMaxBufferCount;
            newStream->priv = privStream;
            mStreams.push_back(newStream);
        } else {
            // Existing stream, mark as still alive.
            PrivateStreamInfo *privStream =
                    static_cast<PrivateStreamInfo*>(newStream->priv);
            privStream->alive = true;
        }
        // Always update usage and max buffers
        newStream->max_buffers = kMaxBufferCount;
        switch (newStream->stream_type) {
            case CAMERA3_STREAM_OUTPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
            case CAMERA3_STREAM_INPUT:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
                break;
            case CAMERA3_STREAM_BIDIRECTIONAL:
                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
                        GRALLOC_USAGE_HW_CAMERA_WRITE;
                break;
        }
    }

    /**
     * Reap the dead streams
     */
    for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
        PrivateStreamInfo *privStream =
                static_cast<PrivateStreamInfo*>((*s)->priv);
        if (!privStream->alive) {
            (*s)->priv = NULL;
            delete privStream;
            s = mStreams.erase(s);
        } else {
            ++s;
        }
    }

    /**
     * Can't reuse settings across configure call
     */
    mPrevSettings.clear();

    return OK;
}

status_t EmulatedFakeCamera3::registerStreamBuffers(
        const camera3_stream_buffer_set *bufferSet) {
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock l(mLock);

    // Should not be called in HAL versions >= 3.2

    ALOGE("%s: Should not be invoked on new HALs!",
            __FUNCTION__);
    return NO_INIT;
}

const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
        int type) {
    ALOGV("%s: E", __FUNCTION__);
    Mutex::Autolock l(mLock);

    if (type < 0 || type >= CAMERA3_TEMPLATE_COUNT) {
        ALOGE("%s: Unknown request settings template: %d",
                __FUNCTION__, type);
        return NULL;
    }

    if (!hasCapability(BACKWARD_COMPATIBLE) && type != CAMERA3_TEMPLATE_PREVIEW) {
        ALOGE("%s: Template %d not supported w/o BACKWARD_COMPATIBLE capability",
                __FUNCTION__, type);
        return NULL;
    }

    /**
     * Cache is not just an optimization - pointer returned has to live at
     * least as long as the camera device instance does.
     */
    if (mDefaultTemplates[type] != NULL) {
        return mDefaultTemplates[type];
    }

    CameraMetadata settings;

    /** android.request */

    static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
    settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);

    static const int32_t id = 0;
    settings.update(ANDROID_REQUEST_ID, &id, 1);

    static const int32_t frameCount = 0;
    settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);

    /** android.lens */

    static const float focalLength = 5.0f;
    settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const float focusDistance = 0;
        settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);

        static const float aperture = 2.8f;
        settings.update(ANDROID_LENS_APERTURE, &aperture, 1);

        static const float filterDensity = 0;
        settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);

        static const uint8_t opticalStabilizationMode =
                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
                &opticalStabilizationMode, 1);

        // FOCUS_RANGE set only in frame
    }

    /** android.sensor */

    if (hasCapability(MANUAL_SENSOR)) {
        static const int64_t exposureTime = 10 * MSEC;
        settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);

        static const int64_t frameDuration = 33333333L; // 1/30 s
        settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);

        static const int32_t sensitivity = 100;
        settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
    }

    // TIMESTAMP set only in frame

    /** android.flash */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
        settings.update(ANDROID_FLASH_MODE, &flashMode, 1);

        static const uint8_t flashPower = 10;
        settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);

        static const int64_t firingTime = 0;
        settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
    }

    /** Processing block modes */
    if (hasCapability(MANUAL_POST_PROCESSING)) {
        uint8_t hotPixelMode = 0;
        uint8_t demosaicMode = 0;
        uint8_t noiseMode = 0;
        uint8_t shadingMode = 0;
        uint8_t colorMode = 0;
        uint8_t tonemapMode = 0;
        uint8_t edgeMode = 0;
        switch (type) {
            case CAMERA3_TEMPLATE_STILL_CAPTURE:
                // fall-through
            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
                // fall-through
            case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
                hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
                demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
                noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
                shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
                colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
                tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
                edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
                break;
            case CAMERA3_TEMPLATE_PREVIEW:
                // fall-through
            case CAMERA3_TEMPLATE_VIDEO_RECORD:
                // fall-through
            default:
                hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
                demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
                noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
                shadingMode = ANDROID_SHADING_MODE_FAST;
                colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
                tonemapMode = ANDROID_TONEMAP_MODE_FAST;
                edgeMode = ANDROID_EDGE_MODE_FAST;
                break;
        }
        settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
        settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
        settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
        settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
        settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
        settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
        settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
    }

    /** android.colorCorrection */

    if (hasCapability(MANUAL_POST_PROCESSING)) {
        static const camera_metadata_rational colorTransform[9] = {
            {1,1}, {0,1}, {0,1},
            {0,1}, {1,1}, {0,1},
            {0,1}, {0,1}, {1,1}
        };
        settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);

        static const float colorGains[4] = {
            1.0f, 1.0f, 1.0f, 1.0f
        };
        settings.update(ANDROID_COLOR_CORRECTION_GAINS, colorGains, 4);
    }

    /** android.tonemap */

    if (hasCapability(MANUAL_POST_PROCESSING)) {
        static const float tonemapCurve[4] = {
            0.f, 0.f,
            1.f, 1.f
        };
        settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
        settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
        settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
    }

    /** android.scaler */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const int32_t cropRegion[4] = {
            0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1]
        };
        settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
    }

    /** android.jpeg */
    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t jpegQuality = 80;
        settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);

        static const int32_t thumbnailSize[2] = {
            640, 480
        };
        settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

        static const uint8_t thumbnailQuality = 80;
        settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);

        static const double gpsCoordinates[2] = {
            0, 0
        };
        settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);

        static const uint8_t gpsProcessingMethod[32] = "None";
        settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);

        static const int64_t gpsTimestamp = 0;
        settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);

        static const int32_t jpegOrientation = 0;
        settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
    }

    /** android.stats */

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const uint8_t faceDetectMode =
                ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
        settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);

        static const uint8_t hotPixelMapMode =
                ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
    }

    // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
    // sharpnessMap only in frames

    /** android.control */

    uint8_t controlIntent = 0;
    switch (type) {
      case CAMERA3_TEMPLATE_PREVIEW:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        break;
      case CAMERA3_TEMPLATE_STILL_CAPTURE:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
        break;
      case CAMERA3_TEMPLATE_VIDEO_RECORD:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
        break;
      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
        break;
      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
        break;
      case CAMERA3_TEMPLATE_MANUAL:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
        break;
      default:
        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
        break;
    }
    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);

    const uint8_t controlMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
            ANDROID_CONTROL_MODE_OFF :
            ANDROID_CONTROL_MODE_AUTO;
    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);

    int32_t aeTargetFpsRange[2] = {
        5, 30
    };
    if (type == CAMERA3_TEMPLATE_VIDEO_RECORD || type == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT) {
        aeTargetFpsRange[0] = 30;
    }
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);

    if (hasCapability(BACKWARD_COMPATIBLE)) {

        static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
        settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

        static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
        settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

        const uint8_t aeMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AE_MODE_OFF :
                ANDROID_CONTROL_AE_MODE_ON;
        settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);

        static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);

        static const int32_t controlRegions[5] = {
            0, 0, 0, 0, 0
        };
        settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);

        static const int32_t aeExpCompensation = 0;
        settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);


        static const uint8_t aeAntibandingMode =
                ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);

        static const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);

        const uint8_t awbMode = (type == CAMERA3_TEMPLATE_MANUAL) ?
                ANDROID_CONTROL_AWB_MODE_OFF :
                ANDROID_CONTROL_AWB_MODE_AUTO;
        settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

        static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
        settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

        uint8_t afMode = 0;
        switch (type) {
            case CAMERA3_TEMPLATE_PREVIEW:
                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                break;
            case CAMERA3_TEMPLATE_STILL_CAPTURE:
                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                break;
            case CAMERA3_TEMPLATE_VIDEO_RECORD:
                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                break;
            case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
                break;
            case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
                afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
                break;
            case CAMERA3_TEMPLATE_MANUAL:
                afMode = ANDROID_CONTROL_AF_MODE_OFF;
                break;
            default:
                afMode = ANDROID_CONTROL_AF_MODE_AUTO;
                break;
        }
        settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);

        settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);

        static const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
        settings.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

        static const uint8_t vstabMode =
                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
        settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);

        static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
        settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);

        static const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
        settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);

        static const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
        settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);

        static const int32_t testPatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
        settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testPatternMode, 1);
    }

    mDefaultTemplates[type] = settings.release();

    return mDefaultTemplates[type];
}

status_t EmulatedFakeCamera3::processCaptureRequest(
        camera3_capture_request *request) {

    Mutex::Autolock l(mLock);
    status_t res;

    /** Validation */

    if (mStatus < STATUS_READY) {
        ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
                mStatus);
        return INVALID_OPERATION;
    }

    if (request == NULL) {
        ALOGE("%s: NULL request!", __FUNCTION__);
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;

    if (request->settings == NULL && mPrevSettings.isEmpty()) {
        ALOGE("%s: Request %d: NULL settings for first request after "
                "configureStreams()", __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }

    if (request->input_buffer != NULL &&
            request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
                __FUNCTION__, frameNumber);
        ALOGV("%s: Bad stream %p, expected: %p",
              __FUNCTION__, request->input_buffer->stream,
              mInputStream);
        ALOGV("%s: Bad stream type %d, expected stream type %d",
              __FUNCTION__, request->input_buffer->stream->stream_type,
              mInputStream ? mInputStream->stream_type : -1);

        return BAD_VALUE;
    }

    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
        ALOGE("%s: Request %d: No output buffers provided!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }

    // Validate all buffers, starting with input buffer if it's given

    ssize_t idx;
    const camera3_stream_buffer_t *b;
    if (request->input_buffer != NULL) {
        idx = -1;
        b = request->input_buffer;
    } else {
        idx = 0;
        b = request->output_buffers;
    }
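    // Walk the input buffer first (idx == -1) if one was provided, then each
    // output buffer in turn.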
    do {
        PrivateStreamInfo *priv =
                static_cast<PrivateStreamInfo*>(b->stream->priv);
        if (priv == NULL) {
            ALOGE("%s: Request %d: Buffer %zu: Unconfigured stream!",
                    __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (!priv->alive) {
            ALOGE("%s: Request %d: Buffer %zu: Dead stream!",
                    __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %zu: Status not OK!",
                    __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %zu: Has a release fence!",
                    __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %zu: NULL buffer handle!",
                    __FUNCTION__, frameNumber, idx);
            return BAD_VALUE;
        }
        idx++;
        b = &(request->output_buffers[idx]);
    } while (idx < (ssize_t)request->num_output_buffers);

    // TODO: Validate settings parameters

    /**
     * Start processing this request
     */

    mStatus = STATUS_ACTIVE;

    CameraMetadata settings;

    if (request->settings == NULL) {
        settings.acquire(mPrevSettings);
    } else {
        settings = request->settings;
    }

    res = process3A(settings);
    if (res != OK) {
        return res;
    }

    // TODO: Handle reprocessing

    /**
     * Get ready for sensor config
     */

    nsecs_t  exposureTime;
    nsecs_t  frameDuration;
    uint32_t sensitivity;
    bool     needJpeg = false;
    camera_metadata_entry_t entry;

    entry = settings.find(ANDROID_SENSOR_EXPOSURE_TIME);
    exposureTime = (entry.count > 0) ? entry.data.i64[0] : Sensor::kExposureTimeRange[0];
    entry = settings.find(ANDROID_SENSOR_FRAME_DURATION);
    frameDuration = (entry.count > 0)? entry.data.i64[0] : Sensor::kFrameDurationRange[0];
    entry = settings.find(ANDROID_SENSOR_SENSITIVITY);
    sensitivity = (entry.count > 0) ? entry.data.i32[0] : Sensor::kSensitivityRange[0];

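    // The frame must be long enough to fit the exposure plus vertical blank;
    // stretch the frame duration if the requested exposure doesn't fit.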
    if (exposureTime > frameDuration) {
        frameDuration = exposureTime + Sensor::kMinVerticalBlank;
        settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
    }

    Buffers *sensorBuffers = new Buffers();
    HalBufferVector *buffers = new HalBufferVector();

    sensorBuffers->setCapacity(request->num_output_buffers);
    buffers->setCapacity(request->num_output_buffers);

    // Process all the buffers we got for output, constructing internal buffer
    // structures for them, and lock them for writing.
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer &srcBuf = request->output_buffers[i];
        const cb_handle_t *privBuffer =
                static_cast<const cb_handle_t*>(*srcBuf.buffer);
        if (!cb_handle_t::validate(privBuffer)) {
          privBuffer = nullptr;
        }
        StreamBuffer destBuf;
        destBuf.streamId = kGenericStreamId;
        destBuf.width    = srcBuf.stream->width;
        destBuf.height   = srcBuf.stream->height;
        // If we have more specific format information, use it.
        destBuf.format = (privBuffer) ? privBuffer->format : srcBuf.stream->format;
        destBuf.stride   = srcBuf.stream->width; // TODO: query from gralloc
        destBuf.dataSpace = srcBuf.stream->data_space;
        destBuf.buffer   = srcBuf.buffer;

        if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
            needJpeg = true;
        }

        // Wait on fence
        sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
        res = bufferAcquireFence->wait(kFenceTimeoutMs);
        if (res == TIMED_OUT) {
            ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
                    __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
        }
        if (res == OK) {
            // Lock buffer for writing
            const Rect rect(destBuf.width, destBuf.height);
            if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                if (destBuf.format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
                    android_ycbcr ycbcr = android_ycbcr();
                    res = GraphicBufferMapper::get().lockYCbCr(
                        *(destBuf.buffer),
                        GRALLOC_USAGE_HW_CAMERA_WRITE, rect,
                        &ycbcr);
                    // This is only valid because we know that emulator's
                    // YCbCr_420_888 is really contiguous NV21 under the hood
                    destBuf.img = static_cast<uint8_t*>(ycbcr.y);
                } else {
                    ALOGE("Unexpected private format for flexible YUV: 0x%x",
                            destBuf.format);
                    res = INVALID_OPERATION;
                }
            } else {
                res = GraphicBufferMapper::get().lock(*(destBuf.buffer),
                        GRALLOC_USAGE_HW_CAMERA_WRITE, rect,
                        (void**)&(destBuf.img));
            }
            if (res != OK) {
                ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
                        __FUNCTION__, frameNumber, i);
            }
        }

        if (res != OK) {
            // Either waiting or locking failed. Unlock locked buffers and bail
            // out.
            for (size_t j = 0; j < i; j++) {
                GraphicBufferMapper::get().unlock(
                        *(request->output_buffers[j].buffer));
            }
            delete sensorBuffers;
            delete buffers;
            return NO_INIT;
        }

        sensorBuffers->push_back(destBuf);
        buffers->push_back(srcBuf);
    }

    /**
     * Wait for JPEG compressor to not be busy, if needed
     */
    if (needJpeg) {
        bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
        if (!ready) {
            ALOGE("%s: Timeout waiting for JPEG compression to complete!",
                    __FUNCTION__);
            return NO_INIT;
        }
    }

    /**
     * Wait until the in-flight queue has room
     */
    res = mReadoutThread->waitForReadout();
    if (res != OK) {
        ALOGE("%s: Timeout waiting for previous requests to complete!",
                __FUNCTION__);
        return NO_INIT;
    }

    /**
     * Wait until the sensor is ready. This can block for lengthy amounts of
     * time with mLock held, but the interface spec is that no other calls may
     * be made to the HAL by the framework while process_capture_request is
     * in progress.
     */
    int syncTimeoutCount = 0;
    while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
        if (mStatus == STATUS_ERROR) {
            return NO_INIT;
        }
        if (syncTimeoutCount == kMaxSyncTimeoutCount) {
            ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
                    __FUNCTION__, frameNumber,
                    kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
            return NO_INIT;
        }
        syncTimeoutCount++;
    }

    /**
     * Configure sensor and queue up the request to the readout thread
     */
    mSensor->setExposureTime(exposureTime);
    mSensor->setFrameDuration(frameDuration);
    mSensor->setSensitivity(sensitivity);
    mSensor->setDestinationBuffers(sensorBuffers);
    mSensor->setFrameNumber(request->frame_number);

    ReadoutThread::Request r;
    r.frameNumber = request->frame_number;
    r.settings = settings;
    r.sensorBuffers = sensorBuffers;
    r.buffers = buffers;

    mReadoutThread->queueCaptureRequest(r);
    ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);

    // Cache the settings for next time
    mPrevSettings.acquire(settings);

    return OK;
}

status_t EmulatedFakeCamera3::flush() {
    ALOGW("%s: Not implemented; ignored", __FUNCTION__);
    return OK;
}

/** Debug methods */

void EmulatedFakeCamera3::dump(int fd) {

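    // No debug state is currently dumped for the fake camera.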
}

/**
 * Private methods
 */

status_t EmulatedFakeCamera3::getCameraCapabilities() {

    const char *key = mFacingBack ? "qemu.sf.back_camera_caps" : "qemu.sf.front_camera_caps";

    /* Defined by 'qemu.sf.*_camera_caps' boot property: if the
     * property doesn't exist, it is assumed to list FULL. */
    char prop[PROPERTY_VALUE_MAX];
    if (property_get(key, prop, NULL) > 0) {
        char *saveptr = nullptr;
        char *cap = strtok_r(prop, " ,", &saveptr);
        while (cap != NULL) {
            for (int i = 0; i < NUM_CAPABILITIES; i++) {
                if (!strcasecmp(cap, sAvailableCapabilitiesStrings[i])) {
                    mCapabilities.add(static_cast<AvailableCapabilities>(i));
                    break;
                }
            }
            cap = strtok_r(NULL, " ,", &saveptr);
        }
        if (mCapabilities.size() == 0) {
            ALOGE("%s had no valid capabilities: %s", key, prop);
        }
    }
    // Default to FULL_LEVEL plus RAW if nothing is defined
    if (mCapabilities.size() == 0) {
        mCapabilities.add(FULL_LEVEL);
        mCapabilities.add(RAW);
    }

    // Add level-based caps
    if (hasCapability(FULL_LEVEL)) {
        mCapabilities.add(BURST_CAPTURE);
        mCapabilities.add(READ_SENSOR_SETTINGS);
        mCapabilities.add(MANUAL_SENSOR);
        mCapabilities.add(MANUAL_POST_PROCESSING);
    }

    // Backwards-compatible is required for most other caps
    // Not required for DEPTH_OUTPUT, though.
    if (hasCapability(BURST_CAPTURE) ||
            hasCapability(READ_SENSOR_SETTINGS) ||
            hasCapability(RAW) ||
            hasCapability(MANUAL_SENSOR) ||
            hasCapability(MANUAL_POST_PROCESSING) ||
            hasCapability(PRIVATE_REPROCESSING) ||
            hasCapability(YUV_REPROCESSING) ||
            hasCapability(CONSTRAINED_HIGH_SPEED_VIDEO)) {
        mCapabilities.add(BACKWARD_COMPATIBLE);
    }

    ALOGI("Camera %d capabilities:", mCameraID);
    for (size_t i = 0; i < mCapabilities.size(); i++) {
        ALOGI("  %s", sAvailableCapabilitiesStrings[mCapabilities[i]]);
    }

    return OK;
}

bool EmulatedFakeCamera3::hasCapability(AvailableCapabilities cap) {
    ssize_t idx = mCapabilities.indexOf(cap);
    return idx >= 0;
}

status_t EmulatedFakeCamera3::constructStaticInfo() {

    CameraMetadata info;
    Vector<int32_t> availableCharacteristicsKeys;
    status_t res;

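    // Helper macro: record each static tag in the available-characteristics
    // list, and bail out of constructStaticInfo() if the metadata update
    // fails.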
#define ADD_STATIC_ENTRY(name, varptr, count) \
        availableCharacteristicsKeys.add(name);   \
        res = info.update(name, varptr, count); \
        if (res != OK) return res

    // android.sensor

    if (hasCapability(MANUAL_SENSOR)) {

        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
                Sensor::kExposureTimeRange, 2);

        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
                &Sensor::kFrameDurationRange[1], 1);

        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
                Sensor::kSensitivityRange,
                sizeof(Sensor::kSensitivityRange)
                /sizeof(int32_t));

        ADD_STATIC_ENTRY(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
                &Sensor::kSensitivityRange[1], 1);
    }

    static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
            sensorPhysicalSize, 2);

    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
            (int32_t*)Sensor::kResolution, 2);

    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
            (int32_t*)Sensor::kActiveArray, 4);

    static const int32_t orientation = 90; // Aligned with 'long edge'
    ADD_STATIC_ENTRY(ANDROID_SENSOR_ORIENTATION, &orientation, 1);

    static const uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME;
    ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE, &timestampSource, 1);

    if (hasCapability(RAW)) {
        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
                &Sensor::kColorFilterArrangement, 1);

        ADD_STATIC_ENTRY(ANDROID_SENSOR_INFO_WHITE_LEVEL,
                (int32_t*)&Sensor::kMaxRawValue, 1);

        static const int32_t blackLevelPattern[4] = {
            (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
            (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
        };
        ADD_STATIC_ENTRY(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
                blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
    }

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        static const int32_t availableTestPatternModes[] = {
            ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
        };
        ADD_STATIC_ENTRY(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
                availableTestPatternModes, sizeof(availableTestPatternModes)/sizeof(int32_t));
    }

    // android.lens

    static const float focalLength = 3.30f; // mm
    ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
            &focalLength, 1);

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        // 5 cm min focus distance for back camera, infinity (fixed focus) for front
        const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
                &minFocusDistance, 1);

        // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
        const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
                &hyperFocalDistance, 1);

        static const float aperture = 2.8f;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
                &aperture, 1);
        static const float filterDensity = 0;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
                &filterDensity, 1);
        static const uint8_t availableOpticalStabilization =
                ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
                &availableOpticalStabilization, 1);

        static const int32_t lensShadingMapSize[] = {1, 1};
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
                sizeof(lensShadingMapSize)/sizeof(int32_t));

        static const uint8_t lensFocusCalibration =
                ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE;
        ADD_STATIC_ENTRY(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION, &lensFocusCalibration, 1);
    }

    if (hasCapability(DEPTH_OUTPUT)) {
        // These could be included for non-DEPTH capability as well, but they are kept
        // DEPTH-only here to vary the testing coverage

        // 90 degree rotation to align with long edge of a phone device that's by default portrait
        static const float qO[] = { 0.707107f, 0.f, 0.f, 0.707107f};

        // Either a 180-degree rotation for back-facing, or no rotation for front-facing
        const float qF[] = {0, (mFacingBack ? 1.f : 0.f), 0, (mFacingBack ? 0.f : 1.f)};

        // Quaternion product: orientation change, then facing
        const float lensPoseRotation[] = {qO[0]*qF[0] - qO[1]*qF[1] - qO[2]*qF[2] - qO[3]*qF[3],
                                          qO[0]*qF[1] + qO[1]*qF[0] + qO[2]*qF[3] - qO[3]*qF[2],
                                          qO[0]*qF[2] + qO[2]*qF[0] + qO[1]*qF[3] - qO[3]*qF[1],
                                          qO[0]*qF[3] + qO[3]*qF[0] + qO[1]*qF[2] - qO[2]*qF[1]};

        ADD_STATIC_ENTRY(ANDROID_LENS_POSE_ROTATION, lensPoseRotation,
                sizeof(lensPoseRotation)/sizeof(float));

        // Only one camera facing each way, so 0 translation needed to the center of the 'main'
        // camera
        static const float lensPoseTranslation[] = {0.f, 0.f, 0.f};

        ADD_STATIC_ENTRY(ANDROID_LENS_POSE_TRANSLATION, lensPoseTranslation,
                sizeof(lensPoseTranslation)/sizeof(float));

        // Intrinsic calibration is 'ideal': (f_x, f_y, c_x, c_y, s) are derived directly
        // from the focal length and active array size
        float f_x = focalLength * Sensor::kActiveArray[2] / sensorPhysicalSize[0];
        float f_y = focalLength * Sensor::kActiveArray[3] / sensorPhysicalSize[1];
        float c_x = Sensor::kActiveArray[2] / 2.f;
        float c_y = Sensor::kActiveArray[3] / 2.f;
        float s = 0.f;
        const float lensIntrinsics[] = { f_x, f_y, c_x, c_y, s };

        ADD_STATIC_ENTRY(ANDROID_LENS_INTRINSIC_CALIBRATION, lensIntrinsics,
                sizeof(lensIntrinsics)/sizeof(float));

        // No radial or tangential distortion

        float lensRadialDistortion[] = {1.0f, 0.f, 0.f, 0.f, 0.f, 0.f};

        ADD_STATIC_ENTRY(ANDROID_LENS_RADIAL_DISTORTION, lensRadialDistortion,
                sizeof(lensRadialDistortion)/sizeof(float));

    }


    const uint8_t lensFacing = mFacingBack ?
            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
    ADD_STATIC_ENTRY(ANDROID_LENS_FACING, &lensFacing, 1);

    // android.flash

    static const uint8_t flashAvailable = 0;
    ADD_STATIC_ENTRY(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);

    // android.tonemap

    if (hasCapability(MANUAL_POST_PROCESSING)) {
        static const int32_t tonemapCurvePoints = 128;
        ADD_STATIC_ENTRY(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);

        static const uint8_t availableToneMapModes[] = {
            ANDROID_TONEMAP_MODE_CONTRAST_CURVE,  ANDROID_TONEMAP_MODE_FAST,
            ANDROID_TONEMAP_MODE_HIGH_QUALITY
        };
        ADD_STATIC_ENTRY(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES, availableToneMapModes,
                sizeof(availableToneMapModes));
    }

    // android.scaler

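    // Stream configurations are flattened (format, width, height, direction)
    // tuples, one entry per supported combination.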
    const std::vector<int32_t> availableStreamConfigurationsBasic = {
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
        HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
        HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
        HAL_PIXEL_FORMAT_BLOB, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
    };
    const std::vector<int32_t> availableStreamConfigurationsRaw = {
        HAL_PIXEL_FORMAT_RAW16, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
    };
    const std::vector<int32_t> availableStreamConfigurationsBurst = {
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
        HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
        HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT
    };

    std::vector<int32_t> availableStreamConfigurations;

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        availableStreamConfigurations.insert(availableStreamConfigurations.end(),
                availableStreamConfigurationsBasic.begin(),
                availableStreamConfigurationsBasic.end());
    }
    if (hasCapability(RAW)) {
        availableStreamConfigurations.insert(availableStreamConfigurations.end(),
                availableStreamConfigurationsRaw.begin(),
                availableStreamConfigurationsRaw.end());
    }
    if (hasCapability(BURST_CAPTURE)) {
        availableStreamConfigurations.insert(availableStreamConfigurations.end(),
                availableStreamConfigurationsBurst.begin(),
                availableStreamConfigurationsBurst.end());
    }

    if (availableStreamConfigurations.size() > 0) {
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
                &availableStreamConfigurations[0],
                availableStreamConfigurations.size());
    }

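    // Minimum frame durations are (format, width, height, duration-in-ns)
    // tuples matching the stream configurations above.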
    const std::vector<int64_t> availableMinFrameDurationsBasic = {
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, Sensor::kFrameDurationRange[0],
        HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, Sensor::kFrameDurationRange[0],
        HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, Sensor::kFrameDurationRange[0],
        HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0]
    };
    const std::vector<int64_t> availableMinFrameDurationsRaw = {
        HAL_PIXEL_FORMAT_RAW16, 640, 480, Sensor::kFrameDurationRange[0]
    };
    const std::vector<int64_t> availableMinFrameDurationsBurst = {
        HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, Sensor::kFrameDurationRange[0],
        HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, Sensor::kFrameDurationRange[0],
        HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, Sensor::kFrameDurationRange[0],
    };

    std::vector<int64_t> availableMinFrameDurations;

    if (hasCapability(BACKWARD_COMPATIBLE)) {
        availableMinFrameDurations.insert(availableMinFrameDurations.end(),
                availableMinFrameDurationsBasic.begin(),
                availableMinFrameDurationsBasic.end());
    }
    if (hasCapability(RAW)) {
        availableMinFrameDurations.insert(availableMinFrameDurations.end(),
                availableMinFrameDurationsRaw.begin(),
                availableMinFrameDurationsRaw.end());
    }
    if (hasCapability(BURST_CAPTURE)) {
        availableMinFrameDurations.insert(availableMinFrameDurations.end(),
                availableMinFrameDurationsBurst.begin(),
                availableMinFrameDurationsBurst.end());
    }

    if (availableMinFrameDurations.size() > 0) {
        ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
                &availableMinFrameDurations[0],
                availableMinFrameDurations.size());
    }

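    // Stall durations use the same (format, width, height, duration-in-ns)
    // layout; only BLOB (JPEG) and RAW16 outputs have a non-zero stall here.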
1336     const std::vector<int64_t> availableStallDurationsBasic = {
1337         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 320, 240, 0,
1338         HAL_PIXEL_FORMAT_YCbCr_420_888, 320, 240, 0,
1339         HAL_PIXEL_FORMAT_RGBA_8888, 320, 240, 0,
1340         HAL_PIXEL_FORMAT_BLOB, 640, 480, Sensor::kFrameDurationRange[0]
1341     };
1342     const std::vector<int64_t> availableStallDurationsRaw = {
1343         HAL_PIXEL_FORMAT_RAW16, 640, 480, Sensor::kFrameDurationRange[0]
1344     };
1345     const std::vector<int64_t> availableStallDurationsBurst = {
1346         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 640, 480, 0,
1347         HAL_PIXEL_FORMAT_YCbCr_420_888, 640, 480, 0,
1348         HAL_PIXEL_FORMAT_RGBA_8888, 640, 480, 0
1349     };
1350 
1351     std::vector<int64_t> availableStallDurations;
1352 
1353     if (hasCapability(BACKWARD_COMPATIBLE)) {
1354         availableStallDurations.insert(availableStallDurations.end(),
1355                 availableStallDurationsBasic.begin(),
1356                 availableStallDurationsBasic.end());
1357     }
1358     if (hasCapability(RAW)) {
1359         availableStallDurations.insert(availableStallDurations.end(),
1360                 availableStallDurationsRaw.begin(),
1361                 availableStallDurationsRaw.end());
1362     }
1363     if (hasCapability(BURST_CAPTURE)) {
1364         availableStallDurations.insert(availableStallDurations.end(),
1365                 availableStallDurationsBurst.begin(),
1366                 availableStallDurationsBurst.end());
1367     }
1368 
1369     if (availableStallDurations.size() > 0) {
1370         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
1371                 &availableStallDurations[0],
1372                 availableStallDurations.size());
1373     }
1374 
1375     if (hasCapability(BACKWARD_COMPATIBLE)) {
1376         static const uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
1377         ADD_STATIC_ENTRY(ANDROID_SCALER_CROPPING_TYPE,
1378                 &croppingType, 1);
1379 
1380         static const float maxZoom = 10;
1381         ADD_STATIC_ENTRY(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1382                 &maxZoom, 1);
1383     }
1384 
1385     // android.jpeg
1386 
1387     if (hasCapability(BACKWARD_COMPATIBLE)) {
1388         static const int32_t jpegThumbnailSizes[] = {
1389             0, 0,
1390             160, 120,
1391             320, 240
1392         };
1393         ADD_STATIC_ENTRY(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1394                 jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1395 
1396         static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1397         ADD_STATIC_ENTRY(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1398     }
1399 
1400     // android.stats
1401 
1402     if (hasCapability(BACKWARD_COMPATIBLE)) {
1403         static const uint8_t availableFaceDetectModes[] = {
1404             ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1405             ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1406             ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1407         };
1408         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1409                 availableFaceDetectModes,
1410                 sizeof(availableFaceDetectModes));
1411 
1412         static const int32_t maxFaceCount = 8;
1413         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1414                 &maxFaceCount, 1);
1415 
1416 
1417         static const uint8_t availableShadingMapModes[] = {
1418             ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF
1419         };
1420         ADD_STATIC_ENTRY(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
1421                 availableShadingMapModes, sizeof(availableShadingMapModes));
1422     }
1423 
1424     // android.sync
1425 
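     // PER_FRAME_CONTROL advertises that new settings apply to the frame they are
     // submitted with; otherwise this emulated device reports a worst-case latency
     // of 3 frames before settings take effect.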
1426     static const int32_t maxLatency =
1427             hasCapability(FULL_LEVEL) ? ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL : 3;
1428     ADD_STATIC_ENTRY(ANDROID_SYNC_MAX_LATENCY, &maxLatency, 1);
1429 
1430     // android.control
1431 
1432     if (hasCapability(BACKWARD_COMPATIBLE)) {
1433         static const uint8_t availableControlModes[] = {
1434             ANDROID_CONTROL_MODE_OFF, ANDROID_CONTROL_MODE_AUTO, ANDROID_CONTROL_MODE_USE_SCENE_MODE
1435         };
1436         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
1437                 availableControlModes, sizeof(availableControlModes));
1438     } else {
1439         static const uint8_t availableControlModes[] = {
1440             ANDROID_CONTROL_MODE_AUTO
1441         };
1442         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_MODES,
1443                 availableControlModes, sizeof(availableControlModes));
1444     }
1445 
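     // Only FACE_PRIORITY is emulated; devices without BACKWARD_COMPATIBLE list
     // SCENE_MODE_DISABLED as the no-op placeholder entry instead.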
1446     static const uint8_t availableSceneModes[] = {
1447         hasCapability(BACKWARD_COMPATIBLE) ?
1448             ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY :
1449             ANDROID_CONTROL_SCENE_MODE_DISABLED
1450     };
1451     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1452             availableSceneModes, sizeof(availableSceneModes));
1453 
1454     if (hasCapability(BACKWARD_COMPATIBLE)) {
1455         static const uint8_t availableEffects[] = {
1456             ANDROID_CONTROL_EFFECT_MODE_OFF
1457         };
1458         ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1459                 availableEffects, sizeof(availableEffects));
1460     }
1461 
1462     if (hasCapability(BACKWARD_COMPATIBLE)) {
1463         static const int32_t max3aRegions[] = {/*AE*/ 1, /*AWB*/ 0, /*AF*/ 1};
1464         ADD_STATIC_ENTRY(ANDROID_CONTROL_MAX_REGIONS,
1465                 max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
1466 
1467         static const uint8_t availableAeModes[] = {
1468             ANDROID_CONTROL_AE_MODE_OFF,
1469             ANDROID_CONTROL_AE_MODE_ON
1470         };
1471         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1472                 availableAeModes, sizeof(availableAeModes));
1473 
1474         static const camera_metadata_rational exposureCompensationStep = {
1475             1, 3
1476         };
1477         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1478                 &exposureCompensationStep, 1);
1479 
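             // With the 1/3 EV step above, the [-9, 9] index range below spans
             // +/-3 EV; e.g. a compensation index of +6 corresponds to +2 EV
             // (a 4x exposure increase).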
1480         int32_t exposureCompensationRange[] = {-9, 9};
1481         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1482                 exposureCompensationRange,
1483                 sizeof(exposureCompensationRange)/sizeof(int32_t));
1484     }
1485 
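     // Entries are (min, max) target FPS pairs: [5,30], [15,30], [15,15], [30,30].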
1486     static const int32_t availableTargetFpsRanges[] = {
1487             5, 30, 15, 30, 15, 15, 30, 30
1488     };
1489     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1490             availableTargetFpsRanges,
1491             sizeof(availableTargetFpsRanges)/sizeof(int32_t));
1492 
1493     if (hasCapability(BACKWARD_COMPATIBLE)) {
1494         static const uint8_t availableAntibandingModes[] = {
1495             ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
1496             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
1497         };
1498         ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1499                 availableAntibandingModes, sizeof(availableAntibandingModes));
1500     }
1501 
1502     static const uint8_t aeLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
1503             ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
1504 
1505     ADD_STATIC_ENTRY(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
1506             &aeLockAvailable, 1);
1507 
1508     if (hasCapability(BACKWARD_COMPATIBLE)) {
1509         static const uint8_t availableAwbModes[] = {
1510             ANDROID_CONTROL_AWB_MODE_OFF,
1511             ANDROID_CONTROL_AWB_MODE_AUTO,
1512             ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1513             ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1514             ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1515             ANDROID_CONTROL_AWB_MODE_SHADE
1516         };
1517         ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1518                 availableAwbModes, sizeof(availableAwbModes));
1519     }
1520 
1521     static const uint8_t awbLockAvailable = hasCapability(BACKWARD_COMPATIBLE) ?
1522             ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
1523 
1524     ADD_STATIC_ENTRY(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
1525             &awbLockAvailable, 1);
1526 
1527     static const uint8_t availableAfModesBack[] = {
1528             ANDROID_CONTROL_AF_MODE_OFF,
1529             ANDROID_CONTROL_AF_MODE_AUTO,
1530             ANDROID_CONTROL_AF_MODE_MACRO,
1531             ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1532             ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
1533     };
1534 
1535     static const uint8_t availableAfModesFront[] = {
1536             ANDROID_CONTROL_AF_MODE_OFF
1537     };
1538 
1539     if (mFacingBack && hasCapability(BACKWARD_COMPATIBLE)) {
1540         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1541                 availableAfModesBack, sizeof(availableAfModesBack));
1542     } else {
1543         ADD_STATIC_ENTRY(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1544                 availableAfModesFront, sizeof(availableAfModesFront));
1545     }
1546 
1547     static const uint8_t availableVstabModes[] = {
1548         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
1549     };
1550     ADD_STATIC_ENTRY(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1551             availableVstabModes, sizeof(availableVstabModes));
1552 
1553     // android.colorCorrection
1554 
1555     if (hasCapability(BACKWARD_COMPATIBLE)) {
1556         static const uint8_t availableAberrationModes[] = {
1557             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1558             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
1559             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
1560         };
1561         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1562                 availableAberrationModes, sizeof(availableAberrationModes));
1563     } else {
1564         static const uint8_t availableAberrationModes[] = {
1565             ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
1566         };
1567         ADD_STATIC_ENTRY(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
1568                 availableAberrationModes, sizeof(availableAberrationModes));
1569     }
1570     // android.edge
1571 
1572     if (hasCapability(BACKWARD_COMPATIBLE)) {
1573         static const uint8_t availableEdgeModes[] = {
1574             ANDROID_EDGE_MODE_OFF, ANDROID_EDGE_MODE_FAST, ANDROID_EDGE_MODE_HIGH_QUALITY
1575         };
1576         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
1577                 availableEdgeModes, sizeof(availableEdgeModes));
1578     } else {
1579         static const uint8_t availableEdgeModes[] = {
1580             ANDROID_EDGE_MODE_OFF
1581         };
1582         ADD_STATIC_ENTRY(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
1583                 availableEdgeModes, sizeof(availableEdgeModes));
1584     }
1585 
1586     // android.info
1587 
1588     static const uint8_t supportedHardwareLevel =
1589             hasCapability(FULL_LEVEL) ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
1590                     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
1591     ADD_STATIC_ENTRY(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1592                 &supportedHardwareLevel,
1593                 /*count*/1);
1594 
1595     // android.noiseReduction
1596 
1597     if (hasCapability(BACKWARD_COMPATIBLE)) {
1598         static const uint8_t availableNoiseReductionModes[] = {
1599             ANDROID_NOISE_REDUCTION_MODE_OFF,
1600             ANDROID_NOISE_REDUCTION_MODE_FAST,
1601             ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY
1602         };
1603         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1604                 availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
1605     } else {
1606         static const uint8_t availableNoiseReductionModes[] = {
1607             ANDROID_NOISE_REDUCTION_MODE_OFF,
1608         };
1609         ADD_STATIC_ENTRY(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
1610                 availableNoiseReductionModes, sizeof(availableNoiseReductionModes));
1611     }
1612 
1613     // android.depth
1614 
1615     if (hasCapability(DEPTH_OUTPUT)) {
1616 
1617         static const int32_t maxDepthSamples = 100;
1618         ADD_STATIC_ENTRY(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
1619                 &maxDepthSamples, 1);
1620 
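             // The BLOB entries below describe the depth point-cloud output, whose
             // nominal size is maxDepthSamples x 1 samples.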
1621         static const int32_t availableDepthStreamConfigurations[] = {
1622             HAL_PIXEL_FORMAT_Y16, 160, 120, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT,
1623             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples, 1, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT
1624         };
1625         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
1626                 availableDepthStreamConfigurations,
1627                 sizeof(availableDepthStreamConfigurations)/sizeof(int32_t));
1628 
1629         static const int64_t availableDepthMinFrameDurations[] = {
1630             HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
1631             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples, 1, Sensor::kFrameDurationRange[0]
1632         };
1633         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
1634                 availableDepthMinFrameDurations,
1635                 sizeof(availableDepthMinFrameDurations)/sizeof(int64_t));
1636 
1637         static const int64_t availableDepthStallDurations[] = {
1638             HAL_PIXEL_FORMAT_Y16, 160, 120, Sensor::kFrameDurationRange[0],
1639             HAL_PIXEL_FORMAT_BLOB, maxDepthSamples, 1, Sensor::kFrameDurationRange[0]
1640         };
1641         ADD_STATIC_ENTRY(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
1642                 availableDepthStallDurations,
1643                 sizeof(availableDepthStallDurations)/sizeof(int64_t));
1644 
1645         uint8_t depthIsExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
1646         ADD_STATIC_ENTRY(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
1647                 &depthIsExclusive, 1);
1648     }
1649 
1650     // android.shading
1651 
1652     if (hasCapability(BACKWARD_COMPATIBLE)) {
1653         static const uint8_t availableShadingModes[] = {
1654             ANDROID_SHADING_MODE_OFF, ANDROID_SHADING_MODE_FAST, ANDROID_SHADING_MODE_HIGH_QUALITY
1655         };
1656         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1657                 sizeof(availableShadingModes));
1658     } else {
1659         static const uint8_t availableShadingModes[] = {
1660             ANDROID_SHADING_MODE_OFF
1661         };
1662         ADD_STATIC_ENTRY(ANDROID_SHADING_AVAILABLE_MODES, availableShadingModes,
1663                 sizeof(availableShadingModes));
1664     }
1665 
1666     // android.request
1667 
1668     static const int32_t maxNumOutputStreams[] = {
1669             kMaxRawStreamCount, kMaxProcessedStreamCount, kMaxJpegStreamCount
1670     };
1671     ADD_STATIC_ENTRY(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, maxNumOutputStreams, 3);
1672 
1673     static const uint8_t maxPipelineDepth = kMaxBufferCount;
1674     ADD_STATIC_ENTRY(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &maxPipelineDepth, 1);
1675 
1676     static const int32_t partialResultCount = 1;
1677     ADD_STATIC_ENTRY(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
1678             &partialResultCount, /*count*/1);
1679 
1680     SortedVector<uint8_t> caps;
1681     for (size_t i = 0; i < mCapabilities.size(); i++) {
1682         switch(mCapabilities[i]) {
1683             case BACKWARD_COMPATIBLE:
1684                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
1685                 break;
1686             case MANUAL_SENSOR:
1687                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
1688                 break;
1689             case MANUAL_POST_PROCESSING:
1690                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
1691                 break;
1692             case RAW:
1693                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
1694                 break;
1695             case PRIVATE_REPROCESSING:
1696                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
1697                 break;
1698             case READ_SENSOR_SETTINGS:
1699                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
1700                 break;
1701             case BURST_CAPTURE:
1702                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
1703                 break;
1704             case YUV_REPROCESSING:
1705                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
1706                 break;
1707             case DEPTH_OUTPUT:
1708                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
1709                 break;
1710             case CONSTRAINED_HIGH_SPEED_VIDEO:
1711                 caps.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
1712                 break;
1713             default:
1714                 // Ignore LEVELs
1715                 break;
1716         }
1717     }
1718     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_CAPABILITIES, caps.array(), caps.size());
1719 
1720     // Scan a default request template for included request keys
1721     Vector<int32_t> availableRequestKeys;
1722     const camera_metadata_t *previewRequest =
1723         constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
1724     for (size_t i = 0; i < get_camera_metadata_entry_count(previewRequest); i++) {
1725         camera_metadata_ro_entry_t entry;
1726         get_camera_metadata_ro_entry(previewRequest, i, &entry);
1727         availableRequestKeys.add(entry.tag);
1728     }
1729     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, availableRequestKeys.array(),
1730             availableRequestKeys.size());
1731 
1732     // Add a few more result keys. Must be kept up to date with the various places that add these
1733 
1734     Vector<int32_t> availableResultKeys(availableRequestKeys);
1735     if (hasCapability(BACKWARD_COMPATIBLE)) {
1736         availableResultKeys.add(ANDROID_CONTROL_AE_STATE);
1737         availableResultKeys.add(ANDROID_CONTROL_AF_STATE);
1738         availableResultKeys.add(ANDROID_CONTROL_AWB_STATE);
1739         availableResultKeys.add(ANDROID_FLASH_STATE);
1740         availableResultKeys.add(ANDROID_LENS_STATE);
1741         availableResultKeys.add(ANDROID_LENS_FOCUS_RANGE);
1742         availableResultKeys.add(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW);
1743         availableResultKeys.add(ANDROID_STATISTICS_SCENE_FLICKER);
1744     }
1745 
1746     if (hasCapability(DEPTH_OUTPUT)) {
1747         availableResultKeys.add(ANDROID_LENS_POSE_ROTATION);
1748         availableResultKeys.add(ANDROID_LENS_POSE_TRANSLATION);
1749         availableResultKeys.add(ANDROID_LENS_INTRINSIC_CALIBRATION);
1750         availableResultKeys.add(ANDROID_LENS_RADIAL_DISTORTION);
1751     }
1752 
1753     availableResultKeys.add(ANDROID_REQUEST_PIPELINE_DEPTH);
1754     availableResultKeys.add(ANDROID_SENSOR_TIMESTAMP);
1755 
1756     ADD_STATIC_ENTRY(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, availableResultKeys.array(),
1757             availableResultKeys.size());
1758 
1759     // Needs to be last, to collect all the keys set
1760 
1761     availableCharacteristicsKeys.add(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
1762     info.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
1763             availableCharacteristicsKeys);
1764 
1765     mCameraInfo = info.release();
1766 
1767 #undef ADD_STATIC_ENTRY
1768     return OK;
1769 }
1770 
1771 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
1772     /**
1773      * Extract top-level 3A controls
1774      */
1775     status_t res;
1776 
1777     bool facePriority = false;
1778 
1779     camera_metadata_entry e;
1780 
1781     e = settings.find(ANDROID_CONTROL_MODE);
1782     if (e.count == 0) {
1783         ALOGE("%s: No control mode entry!", __FUNCTION__);
1784         return BAD_VALUE;
1785     }
1786     uint8_t controlMode = e.data.u8[0];
1787 
1788     if (controlMode == ANDROID_CONTROL_MODE_OFF) {
1789         mAeState  = ANDROID_CONTROL_AE_STATE_INACTIVE;
1790         mAfState  = ANDROID_CONTROL_AF_STATE_INACTIVE;
1791         mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1792         update3A(settings);
1793         return OK;
1794     } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
1795         if (!hasCapability(BACKWARD_COMPATIBLE)) {
1796             ALOGE("%s: Can't use scene mode when BACKWARD_COMPATIBLE not supported!",
1797                   __FUNCTION__);
1798             return BAD_VALUE;
1799         }
1800 
1801         e = settings.find(ANDROID_CONTROL_SCENE_MODE);
1802         if (e.count == 0) {
1803             ALOGE("%s: No scene mode entry!", __FUNCTION__);
1804             return BAD_VALUE;
1805         }
1806         uint8_t sceneMode = e.data.u8[0];
1807 
1808         switch(sceneMode) {
1809             case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
1810                 mFacePriority = true;
1811                 break;
1812             default:
1813                 ALOGE("%s: Emulator doesn't support scene mode %d",
1814                         __FUNCTION__, sceneMode);
1815                 return BAD_VALUE;
1816         }
1817     } else {
1818         mFacePriority = false;
1819     }
1820 
1821     // controlMode == AUTO or sceneMode == FACE_PRIORITY
1822     // Process individual 3A controls
1823 
1824     res = doFakeAE(settings);
1825     if (res != OK) return res;
1826 
1827     res = doFakeAF(settings);
1828     if (res != OK) return res;
1829 
1830     res = doFakeAWB(settings);
1831     if (res != OK) return res;
1832 
1833     update3A(settings);
1834     return OK;
1835 }
1836 
1837 status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
1838     camera_metadata_entry e;
1839 
1840     e = settings.find(ANDROID_CONTROL_AE_MODE);
1841     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1842         ALOGE("%s: No AE mode entry!", __FUNCTION__);
1843         return BAD_VALUE;
1844     }
1845     uint8_t aeMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AE_MODE_ON;
1846 
1847     switch (aeMode) {
1848         case ANDROID_CONTROL_AE_MODE_OFF:
1849             // AE is OFF
1850             mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1851             return OK;
1852         case ANDROID_CONTROL_AE_MODE_ON:
1853             // OK for AUTO modes
1854             break;
1855         default:
1856             // Mostly silently ignore unsupported modes
1857             ALOGV("%s: Emulator doesn't support AE mode %d, assuming ON",
1858                     __FUNCTION__, aeMode);
1859             break;
1860     }
1861 
1862     e = settings.find(ANDROID_CONTROL_AE_LOCK);
1863     bool aeLocked = (e.count > 0) ? (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON) : false;
1864 
1865     e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
1866     bool precaptureTrigger = false;
1867     if (e.count != 0) {
1868         precaptureTrigger =
1869                 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
1870     }
1871 
1872     if (precaptureTrigger) {
1873         ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
1874     } else if (e.count > 0) {
1875         ALOGV("%s: Precapture trigger present but not START (count %zu)",
1876               __FUNCTION__,
1877               e.count);
1878     }
1879 
1880     if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
1881         // Run precapture sequence
1882         if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
1883             mAeCounter = 0;
1884         }
1885 
1886         if (mFacePriority) {
1887             mAeTargetExposureTime = kFacePriorityExposureTime;
1888         } else {
1889             mAeTargetExposureTime = kNormalExposureTime;
1890         }
1891 
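             // Precapture is considered done once at least kPrecaptureMinFrames
             // frames have elapsed and the current exposure has converged to within
             // about 10% of the target; until then the exposure moves toward the
             // target by kExposureTrackRate each frame.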
1892         if (mAeCounter > kPrecaptureMinFrames &&
1893                 (mAeTargetExposureTime - mAeCurrentExposureTime) <
1894                 mAeTargetExposureTime / 10) {
1895             // Done with precapture
1896             mAeCounter = 0;
1897             mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
1898                     ANDROID_CONTROL_AE_STATE_CONVERGED;
1899         } else {
1900             // Converge some more
1901             mAeCurrentExposureTime +=
1902                     (mAeTargetExposureTime - mAeCurrentExposureTime) *
1903                     kExposureTrackRate;
1904             mAeCounter++;
1905             mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
1906         }
1907 
1908     } else if (!aeLocked) {
1909         // Run standard occasional AE scan
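             // After kStableAeMaxFrames stable frames the target exposure wanders by
             // a random factor of 2^x, x in [kExposureWanderMin, kExposureWanderMax],
             // and the state walks CONVERGED/INACTIVE -> SEARCHING -> CONVERGED.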
1910         switch (mAeState) {
1911             case ANDROID_CONTROL_AE_STATE_CONVERGED:
1912             case ANDROID_CONTROL_AE_STATE_INACTIVE:
1913                 mAeCounter++;
1914                 if (mAeCounter > kStableAeMaxFrames) {
1915                     mAeTargetExposureTime =
1916                             mFacePriority ? kFacePriorityExposureTime :
1917                             kNormalExposureTime;
1918                     float exposureStep = ((double)rand() / RAND_MAX) *
1919                             (kExposureWanderMax - kExposureWanderMin) +
1920                             kExposureWanderMin;
1921                     mAeTargetExposureTime *= std::pow(2, exposureStep);
1922                     mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
1923                 }
1924                 break;
1925             case ANDROID_CONTROL_AE_STATE_SEARCHING:
1926                 mAeCurrentExposureTime +=
1927                         (mAeTargetExposureTime - mAeCurrentExposureTime) *
1928                         kExposureTrackRate;
1929                 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
1930                         mAeTargetExposureTime / 10) {
1931                     // Close enough
1932                     mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
1933                     mAeCounter = 0;
1934                 }
1935                 break;
1936             case ANDROID_CONTROL_AE_STATE_LOCKED:
1937                 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
1938                 mAeCounter = 0;
1939                 break;
1940             default:
1941                 ALOGE("%s: Emulator in unexpected AE state %d",
1942                         __FUNCTION__, mAeState);
1943                 return INVALID_OPERATION;
1944         }
1945     } else {
1946         // AE is locked
1947         mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
1948     }
1949 
1950     return OK;
1951 }
1952 
1953 status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
1954     camera_metadata_entry e;
1955 
1956     e = settings.find(ANDROID_CONTROL_AF_MODE);
1957     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
1958         ALOGE("%s: No AF mode entry!", __FUNCTION__);
1959         return BAD_VALUE;
1960     }
1961     uint8_t afMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AF_MODE_OFF;
1962 
1963     e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
1964     typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
1965     af_trigger_t afTrigger;
1966     if (e.count != 0) {
1967         afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
1968 
1969         ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
1970         ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
1971     } else {
1972         afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
1973     }
1974 
1975     switch (afMode) {
1976         case ANDROID_CONTROL_AF_MODE_OFF:
1977             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1978             return OK;
1979         case ANDROID_CONTROL_AF_MODE_AUTO:
1980         case ANDROID_CONTROL_AF_MODE_MACRO:
1981         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
1982         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
1983             if (!mFacingBack) {
1984                 ALOGE("%s: Front camera doesn't support AF mode %d",
1985                         __FUNCTION__, afMode);
1986                 return BAD_VALUE;
1987             }
1988             // OK, handle the state transitions below
1989             break;
1990         default:
1991             ALOGE("%s: Emulator doesn't support AF mode %d",
1992                     __FUNCTION__, afMode);
1993             return BAD_VALUE;
1994     }
1995 
1996     bool afModeChanged = mAfMode != afMode;
1997     mAfMode = afMode;
1998 
1999     /**
2000      * Simulate AF triggers. Transition at most 1 state per frame.
2001      * - Focusing always succeeds (goes into locked, or PASSIVE_SCAN).
2002      */
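         // Transitions implemented below: a START trigger moves INACTIVE to
         // ACTIVE_SCAN (AUTO/MACRO) or straight to NOT_FOCUSED_LOCKED (continuous
         // modes); scans resolve randomly, about 2:1 in favor of FOCUSED_LOCKED;
         // CANCEL always returns to INACTIVE.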
2003 
2004     bool afTriggerStart = false;
2005     bool afTriggerCancel = false;
2006     switch (afTrigger) {
2007         case ANDROID_CONTROL_AF_TRIGGER_IDLE:
2008             break;
2009         case ANDROID_CONTROL_AF_TRIGGER_START:
2010             afTriggerStart = true;
2011             break;
2012         case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
2013             afTriggerCancel = true;
2014             // Cancel trigger always transitions into INACTIVE
2015             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
2016 
2017             ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
2018 
2019             // Stay in 'inactive' until at least next frame
2020             return OK;
2021         default:
2022             ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
2023             return BAD_VALUE;
2024     }
2025 
2026     // If we get down here, we're either in an autofocus mode
2027     //  or in a continuous focus mode (and no other modes)
2028 
2029     int oldAfState = mAfState;
2030     switch (mAfState) {
2031         case ANDROID_CONTROL_AF_STATE_INACTIVE:
2032             if (afTriggerStart) {
2033                 switch (afMode) {
2034                     case ANDROID_CONTROL_AF_MODE_AUTO:
2035                         // fall-through
2036                     case ANDROID_CONTROL_AF_MODE_MACRO:
2037                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2038                         break;
2039                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2040                         // fall-through
2041                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2042                         mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2043                         break;
2044                 }
2045             } else {
2046                 // At least one frame stays in INACTIVE
2047                 if (!afModeChanged) {
2048                     switch (afMode) {
2049                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2050                             // fall-through
2051                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2052                             mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
2053                             break;
2054                     }
2055                 }
2056             }
2057             break;
2058         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2059             /**
2060              * When the AF trigger is activated, the algorithm should finish
2061              * its PASSIVE_SCAN if active, and then transition into AF_FOCUSED
2062              * or AF_NOT_FOCUSED as appropriate
2063              */
2064             if (afTriggerStart) {
2065                 // Randomly transition to focused or not focused
2066                 if (rand() % 3) {
2067                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2068                 } else {
2069                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2070                 }
2071             }
2072             /**
2073              * When the AF trigger is not involved, the AF algorithm should
2074              * start in INACTIVE state, and then transition into PASSIVE_SCAN
2075              * and PASSIVE_FOCUSED states
2076              */
2077             else if (!afTriggerCancel) {
2078                // Randomly transition to passive focus
2079                 if (rand() % 3 == 0) {
2080                     mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
2081                 }
2082             }
2083 
2084             break;
2085         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2086             if (afTriggerStart) {
2087                 // Randomly transition to focused or not focused
2088                 if (rand() % 3) {
2089                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2090                 } else {
2091                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2092                 }
2093             }
2094             // TODO: initiate passive scan (PASSIVE_SCAN)
2095             break;
2096         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2097             // Simulate AF sweep completing instantaneously
2098 
2099             // Randomly transition to focused or not focused
2100             if (rand() % 3) {
2101                 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
2102             } else {
2103                 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
2104             }
2105             break;
2106         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2107             if (afTriggerStart) {
2108                 switch (afMode) {
2109                     case ANDROID_CONTROL_AF_MODE_AUTO:
2110                         // fall-through
2111                     case ANDROID_CONTROL_AF_MODE_MACRO:
2112                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2113                         break;
2114                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2115                         // fall-through
2116                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2117                         // continuous autofocus => trigger start has no effect
2118                         break;
2119                 }
2120             }
2121             break;
2122         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2123             if (afTriggerStart) {
2124                 switch (afMode) {
2125                     case ANDROID_CONTROL_AF_MODE_AUTO:
2126                         // fall-through
2127                     case ANDROID_CONTROL_AF_MODE_MACRO:
2128                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
2129                         break;
2130                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
2131                         // fall-through
2132                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
2133                         // continuous autofocus => trigger start has no effect
2134                         break;
2135                 }
2136             }
2137             break;
2138         default:
2139             ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
2140     }
2141 
2142     {
2143         char afStateString[100] = {0,};
2144         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2145                 oldAfState,
2146                 afStateString,
2147                 sizeof(afStateString));
2148 
2149         char afNewStateString[100] = {0,};
2150         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
2151                 mAfState,
2152                 afNewStateString,
2153                 sizeof(afNewStateString));
2154         ALOGVV("%s: AF state transitioned from %s to %s",
2155               __FUNCTION__, afStateString, afNewStateString);
2156     }
2157 
2158 
2159     return OK;
2160 }
2161 
2162 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
2163     camera_metadata_entry e;
2164 
2165     e = settings.find(ANDROID_CONTROL_AWB_MODE);
2166     if (e.count == 0 && hasCapability(BACKWARD_COMPATIBLE)) {
2167         ALOGE("%s: No AWB mode entry!", __FUNCTION__);
2168         return BAD_VALUE;
2169     }
2170     uint8_t awbMode = (e.count > 0) ? e.data.u8[0] : (uint8_t)ANDROID_CONTROL_AWB_MODE_AUTO;
2171 
2172     // TODO: Add white balance simulation
2173 
2174     switch (awbMode) {
2175         case ANDROID_CONTROL_AWB_MODE_OFF:
2176             mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
2177             return OK;
2178         case ANDROID_CONTROL_AWB_MODE_AUTO:
2179         case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
2180         case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
2181         case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
2182         case ANDROID_CONTROL_AWB_MODE_SHADE:
2183             // OK
2184             break;
2185         default:
2186             ALOGE("%s: Emulator doesn't support AWB mode %d",
2187                     __FUNCTION__, awbMode);
2188             return BAD_VALUE;
2189     }
2190 
2191     return OK;
2192 }
2193 
2194 
2195 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
2196     if (mAeMode != ANDROID_CONTROL_AE_MODE_OFF) {
2197         settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
2198                 &mAeCurrentExposureTime, 1);
2199         settings.update(ANDROID_SENSOR_SENSITIVITY,
2200                 &mAeCurrentSensitivity, 1);
2201     }
2202 
2203     settings.update(ANDROID_CONTROL_AE_STATE,
2204             &mAeState, 1);
2205     settings.update(ANDROID_CONTROL_AF_STATE,
2206             &mAfState, 1);
2207     settings.update(ANDROID_CONTROL_AWB_STATE,
2208             &mAwbState, 1);
2209 
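         // Report the lens as moving whenever an AF scan is in progress, and as
         // stationary in every other AF state.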
2210     uint8_t lensState;
2211     switch (mAfState) {
2212         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
2213         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
2214             lensState = ANDROID_LENS_STATE_MOVING;
2215             break;
2216         case ANDROID_CONTROL_AF_STATE_INACTIVE:
2217         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
2218         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
2219         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
2220         case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
2221         default:
2222             lensState = ANDROID_LENS_STATE_STATIONARY;
2223             break;
2224     }
2225     settings.update(ANDROID_LENS_STATE, &lensState, 1);
2226 
2227 }
2228 
2229 void EmulatedFakeCamera3::signalReadoutIdle() {
2230     Mutex::Autolock l(mLock);
2231     // Need to check isIdle again because waiting on mLock may have allowed
2232     // something to be placed in the in-flight queue.
2233     if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
2234         ALOGV("Now idle");
2235         mStatus = STATUS_READY;
2236     }
2237 }
2238 
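     // Sensor callback: exposure-start events are forwarded to the framework as
     // CAMERA3_MSG_SHUTTER notifications carrying the start-of-exposure timestamp.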
2239 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
2240         nsecs_t timestamp) {
2241     switch(e) {
2242         case Sensor::SensorListener::EXPOSURE_START: {
2243             ALOGVV("%s: Frame %d: Sensor started exposure at %" PRId64,
2244                     __FUNCTION__, frameNumber, timestamp);
2245             // Trigger shutter notify to framework
2246             camera3_notify_msg_t msg;
2247             msg.type = CAMERA3_MSG_SHUTTER;
2248             msg.message.shutter.frame_number = frameNumber;
2249             msg.message.shutter.timestamp = timestamp;
2250             sendNotify(&msg);
2251             break;
2252         }
2253         default:
2254             ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
2255                     e, timestamp);
2256             break;
2257     }
2258 }
2259 
2260 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
2261         mParent(parent), mJpegWaiting(false) {
2262 }
2263 
2264 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
2265     for (List<Request>::iterator i = mInFlightQueue.begin();
2266          i != mInFlightQueue.end(); i++) {
2267         delete i->buffers;
2268         delete i->sensorBuffers;
2269     }
2270 }
2271 
2272 void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
2273     Mutex::Autolock l(mLock);
2274 
2275     mInFlightQueue.push_back(r);
2276     mInFlightSignal.signal();
2277 }
2278 
2279 bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
2280     Mutex::Autolock l(mLock);
2281     return mInFlightQueue.empty() && !mThreadActive;
2282 }
2283 
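     // Blocks the caller until the in-flight queue drops below kMaxQueueSize,
     // polling in kWaitPerLoop slices and giving up after kMaxWaitLoops iterations.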
2284 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
2285     status_t res;
2286     Mutex::Autolock l(mLock);
2287     int loopCount = 0;
2288     while (mInFlightQueue.size() >= kMaxQueueSize) {
2289         res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2290         if (res != OK && res != TIMED_OUT) {
2291             ALOGE("%s: Error waiting for in-flight queue to shrink",
2292                     __FUNCTION__);
2293             return INVALID_OPERATION;
2294         }
2295         if (loopCount == kMaxWaitLoops) {
2296             ALOGE("%s: Timed out waiting for in-flight queue to shrink",
2297                     __FUNCTION__);
2298             return TIMED_OUT;
2299         }
2300         loopCount++;
2301     }
2302     return OK;
2303 }
2304 
2305 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
2306     status_t res;
2307 
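         // Each pass: (1) pull one request from the in-flight queue, (2) wait for
         // the sensor to deliver the matching frame, (3) hand any JPEG buffer to the
         // asynchronous compressor, (4) assemble and send the capture result.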
2308     ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
2309 
2310     // First wait for a request from the in-flight queue
2311 
2312     if (mCurrentRequest.settings.isEmpty()) {
2313         Mutex::Autolock l(mLock);
2314         if (mInFlightQueue.empty()) {
2315             res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
2316             if (res == TIMED_OUT) {
2317                 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
2318                         __FUNCTION__);
2319                 return true;
2320             } else if (res != NO_ERROR) {
2321                 ALOGE("%s: Error waiting for capture requests: %d",
2322                         __FUNCTION__, res);
2323                 return false;
2324             }
2325         }
2326         mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
2327         mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
2328         mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
2329         mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
2330         mInFlightQueue.erase(mInFlightQueue.begin());
2331         mInFlightSignal.signal();
2332         mThreadActive = true;
2333         ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
2334                 mCurrentRequest.frameNumber);
2335     }
2336 
2337     // Then wait for it to be delivered from the sensor
2338     ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
2339             __FUNCTION__);
2340 
2341     nsecs_t captureTime;
2342     bool gotFrame =
2343             mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
2344     if (!gotFrame) {
2345         ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
2346                 __FUNCTION__);
2347         return true;
2348     }
2349 
2350     ALOGVV("Sensor done with readout for frame %d, captured at %" PRId64,
2351             mCurrentRequest.frameNumber, captureTime);
2352 
2353     // Check if we need to JPEG encode a buffer, and send it for async
2354     // compression if so. Otherwise prepare the buffer for return.
2355     bool needJpeg = false;
2356     HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
2357     while(buf != mCurrentRequest.buffers->end()) {
2358         bool goodBuffer = true;
2359         if (buf->stream->format == HAL_PIXEL_FORMAT_BLOB &&
2360                 buf->stream->data_space != HAL_DATASPACE_DEPTH) {
2361             Mutex::Autolock jl(mJpegLock);
2362             if (mJpegWaiting) {
2363                 // This shouldn't happen, because processCaptureRequest should
2364                 // be stalling until JPEG compressor is free.
2365                 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
2366                 goodBuffer = false;
2367             }
2368             if (goodBuffer) {
2369                 // Compressor takes ownership of sensorBuffers here
2370                 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
2371                         this);
2372                 goodBuffer = (res == OK);
2373             }
2374             if (goodBuffer) {
2375                 needJpeg = true;
2376 
2377                 mJpegHalBuffer = *buf;
2378                 mJpegFrameNumber = mCurrentRequest.frameNumber;
2379                 mJpegWaiting = true;
2380 
2381                 mCurrentRequest.sensorBuffers = NULL;
2382                 buf = mCurrentRequest.buffers->erase(buf);
2383 
2384                 continue;
2385             }
2386             ALOGE("%s: Error compressing output buffer: %s (%d)",
2387                         __FUNCTION__, strerror(-res), res);
2388             // fallthrough for cleanup
2389         }
2390         GraphicBufferMapper::get().unlock(*(buf->buffer));
2391 
2392         buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
2393                 CAMERA3_BUFFER_STATUS_ERROR;
2394         buf->acquire_fence = -1;
2395         buf->release_fence = -1;
2396 
2397         ++buf;
2398     } // end while
2399 
2400     // Construct result for all completed buffers and results
2401 
2402     camera3_capture_result result;
2403 
2404     if (mParent->hasCapability(BACKWARD_COMPATIBLE)) {
2405         static const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
2406         mCurrentRequest.settings.update(ANDROID_STATISTICS_SCENE_FLICKER,
2407                 &sceneFlicker, 1);
2408 
2409         static const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
2410         mCurrentRequest.settings.update(ANDROID_FLASH_STATE,
2411                 &flashState, 1);
2412 
2413         nsecs_t rollingShutterSkew = Sensor::kFrameDurationRange[0];
2414         mCurrentRequest.settings.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
2415                 &rollingShutterSkew, 1);
2416 
2417         float focusRange[] = { 1.0f/5.0f, 0 }; // 5 m to infinity in focus
2418         mCurrentRequest.settings.update(ANDROID_LENS_FOCUS_RANGE,
2419                 focusRange, sizeof(focusRange)/sizeof(float));
2420     }
2421 
2422     if (mParent->hasCapability(DEPTH_OUTPUT)) {
2423         camera_metadata_entry_t entry;
2424 
2425         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_TRANSLATION, &entry);
2426         mCurrentRequest.settings.update(ANDROID_LENS_POSE_TRANSLATION,
2427                 entry.data.f, entry.count);
2428 
2429         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_POSE_ROTATION, &entry);
2430         mCurrentRequest.settings.update(ANDROID_LENS_POSE_ROTATION,
2431                 entry.data.f, entry.count);
2432 
2433         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_INTRINSIC_CALIBRATION, &entry);
2434         mCurrentRequest.settings.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
2435                 entry.data.f, entry.count);
2436 
2437         find_camera_metadata_entry(mParent->mCameraInfo, ANDROID_LENS_RADIAL_DISTORTION, &entry);
2438         mCurrentRequest.settings.update(ANDROID_LENS_RADIAL_DISTORTION,
2439                 entry.data.f, entry.count);
2440     }
2441 
2442     mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
2443             &captureTime, 1);
2444 
2445 
2446     // JPEGs take a stage longer
2447     const uint8_t pipelineDepth = needJpeg ? kMaxBufferCount : kMaxBufferCount - 1;
2448     mCurrentRequest.settings.update(ANDROID_REQUEST_PIPELINE_DEPTH,
2449             &pipelineDepth, 1);
2450 
2451     result.frame_number = mCurrentRequest.frameNumber;
2452     result.result = mCurrentRequest.settings.getAndLock();
2453     result.num_output_buffers = mCurrentRequest.buffers->size();
2454     result.output_buffers = mCurrentRequest.buffers->array();
2455     result.input_buffer = nullptr;
2456     result.partial_result = 1;
2457 
2458     // Go idle if queue is empty, before sending result
2459     bool signalIdle = false;
2460     {
2461         Mutex::Autolock l(mLock);
2462         if (mInFlightQueue.empty()) {
2463             mThreadActive = false;
2464             signalIdle = true;
2465         }
2466     }
2467     if (signalIdle) mParent->signalReadoutIdle();
2468 
2469     // Send it off to the framework
2470     ALOGVV("%s: ReadoutThread: Send result to framework",
2471             __FUNCTION__);
2472     mParent->sendCaptureResult(&result);
2473 
2474     // Clean up
2475     mCurrentRequest.settings.unlock(result.result);
2476 
2477     delete mCurrentRequest.buffers;
2478     mCurrentRequest.buffers = NULL;
2479     if (!needJpeg) {
2480         delete mCurrentRequest.sensorBuffers;
2481         mCurrentRequest.sensorBuffers = NULL;
2482     }
2483     mCurrentRequest.settings.clear();
2484 
2485     return true;
2486 }
2487 
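     // Called by the JPEG compressor when encoding completes; the compressed buffer
     // is returned to the framework in a separate, buffer-only capture result.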
2488 void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2489         const StreamBuffer &jpegBuffer, bool success) {
2490     Mutex::Autolock jl(mJpegLock);
2491 
2492     GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
2493 
2494     mJpegHalBuffer.status = success ?
2495             CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2496     mJpegHalBuffer.acquire_fence = -1;
2497     mJpegHalBuffer.release_fence = -1;
2498     mJpegWaiting = false;
2499 
2500     camera3_capture_result result;
2501     result.frame_number = mJpegFrameNumber;
2502     result.result = NULL;
2503     result.num_output_buffers = 1;
2504     result.output_buffers = &mJpegHalBuffer;
         result.input_buffer = nullptr;
         result.partial_result = 0;  // buffer-only result; no metadata attached
2505 
2506     if (!success) {
2507         ALOGE("%s: Compression failure, returning error state buffer to"
2508                 " framework", __FUNCTION__);
2509     } else {
2510         ALOGV("%s: Compression complete, returning buffer to framework",
2511                 __FUNCTION__);
2512     }
2513 
2514     mParent->sendCaptureResult(&result);
2515 }
2516 
2517 void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
2518         const StreamBuffer &inputBuffer) {
2519     // Should never get here, since the input buffer has to be returned
2520     // by end of processCaptureRequest
2521     ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2522 }
2523 
2524 
2525 }; // namespace android
2526