1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 /*
18  * Contains the implementation of the EmulatedFakeCamera3 class, which
19  * encapsulates the functionality of an advanced fake camera.
20  */
21 
22 #include <inttypes.h>
23 
24 //#define LOG_NDEBUG 0
25 //#define LOG_NNDEBUG 0
26 #define LOG_TAG "EmulatedCamera_FakeCamera3"
27 #include <utils/Log.h>
28 
29 #include "EmulatedFakeCamera3.h"
30 #include "EmulatedCameraFactory.h"
31 #include <ui/Fence.h>
32 #include <ui/Rect.h>
33 #include <ui/GraphicBufferMapper.h>
34 #include "gralloc_cb.h"
35 
36 #include "fake-pipeline2/Sensor.h"
37 #include "fake-pipeline2/JpegCompressor.h"
38 #include <cmath>
39 
40 #if defined(LOG_NNDEBUG) && LOG_NNDEBUG == 0
41 #define ALOGVV ALOGV
42 #else
43 #define ALOGVV(...) ((void)0)
44 #endif
45 
46 namespace android {
47 
48 /**
49  * Constants for camera capabilities
50  */
51 
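// Time conversion factors, expressed in nanoseconds (the HAL's base time unit).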
52 const int64_t USEC = 1000LL;
53 const int64_t MSEC = USEC * 1000LL;
54 const int64_t SEC = MSEC * 1000LL;
55 
56 const int32_t EmulatedFakeCamera3::kAvailableFormats[5] = {
57         HAL_PIXEL_FORMAT_RAW_SENSOR,
58         HAL_PIXEL_FORMAT_BLOB,
59         HAL_PIXEL_FORMAT_RGBA_8888,
60         HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
61         // These are handled by YCbCr_420_888
62         //        HAL_PIXEL_FORMAT_YV12,
63         //        HAL_PIXEL_FORMAT_YCrCb_420_SP,
64         HAL_PIXEL_FORMAT_YCbCr_420_888
65 };
66 
67 const uint32_t EmulatedFakeCamera3::kAvailableRawSizes[2] = {
68     640, 480
69     //    Sensor::kResolution[0], Sensor::kResolution[1]
70 };
71 
72 const uint64_t EmulatedFakeCamera3::kAvailableRawMinDurations[1] = {
73     (const uint64_t)Sensor::kFrameDurationRange[0]
74 };
75 
76 const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesBack[4] = {
77     640, 480, 320, 240
78     //    Sensor::kResolution[0], Sensor::kResolution[1]
79 };
80 
81 const uint32_t EmulatedFakeCamera3::kAvailableProcessedSizesFront[4] = {
82     320, 240, 160, 120
83     //    Sensor::kResolution[0], Sensor::kResolution[1]
84 };
85 
86 const uint64_t EmulatedFakeCamera3::kAvailableProcessedMinDurations[1] = {
87     (const uint64_t)Sensor::kFrameDurationRange[0]
88 };
89 
90 const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesBack[2] = {
91     640, 480
92     //    Sensor::kResolution[0], Sensor::kResolution[1]
93 };
94 
95 const uint32_t EmulatedFakeCamera3::kAvailableJpegSizesFront[2] = {
96     320, 240
97     //    Sensor::kResolution[0], Sensor::kResolution[1]
98 };
99 
100 
101 const uint64_t EmulatedFakeCamera3::kAvailableJpegMinDurations[1] = {
102     (const uint64_t)Sensor::kFrameDurationRange[0]
103 };
104 
105 /**
106  * 3A constants
107  */
108 
109 // Default exposure and gain targets for different scenarios
110 const nsecs_t EmulatedFakeCamera3::kNormalExposureTime       = 10 * MSEC;
111 const nsecs_t EmulatedFakeCamera3::kFacePriorityExposureTime = 30 * MSEC;
112 const int     EmulatedFakeCamera3::kNormalSensitivity        = 100;
113 const int     EmulatedFakeCamera3::kFacePrioritySensitivity  = 400;
114 const float   EmulatedFakeCamera3::kExposureTrackRate        = 0.1;
115 const int     EmulatedFakeCamera3::kPrecaptureMinFrames      = 10;
116 const int     EmulatedFakeCamera3::kStableAeMaxFrames        = 100;
117 const float   EmulatedFakeCamera3::kExposureWanderMin        = -2;
118 const float   EmulatedFakeCamera3::kExposureWanderMax        = 1;
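// The wander limits are in stops: during its idle scan, the fake AE multiplies
// its target exposure time by 2^x for a random x in
// [kExposureWanderMin, kExposureWanderMax].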
119 
120 /**
121  * Camera device lifecycle methods
122  */
123 
124 EmulatedFakeCamera3::EmulatedFakeCamera3(int cameraId, bool facingBack,
125         struct hw_module_t* module) :
126         EmulatedCamera3(cameraId, module),
127         mFacingBack(facingBack) {
128     ALOGI("Constructing emulated fake camera 3 facing %s",
129             facingBack ? "back" : "front");
130 
131     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
132         mDefaultTemplates[i] = NULL;
133     }
134 
135     /**
136      * Front cameras = limited mode
137      * Back cameras = full mode
138      */
139     mFullMode = facingBack;
140 }
141 
142 EmulatedFakeCamera3::~EmulatedFakeCamera3() {
143     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) {
144         if (mDefaultTemplates[i] != NULL) {
145             free_camera_metadata(mDefaultTemplates[i]);
146         }
147     }
148 }
149 
150 status_t EmulatedFakeCamera3::Initialize() {
151     ALOGV("%s: E", __FUNCTION__);
152     status_t res;
153 
154     if (mStatus != STATUS_ERROR) {
155         ALOGE("%s: Already initialized!", __FUNCTION__);
156         return INVALID_OPERATION;
157     }
158 
159     res = constructStaticInfo();
160     if (res != OK) {
161         ALOGE("%s: Unable to allocate static info: %s (%d)",
162                 __FUNCTION__, strerror(-res), res);
163         return res;
164     }
165 
166     return EmulatedCamera3::Initialize();
167 }
168 
169 status_t EmulatedFakeCamera3::connectCamera(hw_device_t** device) {
170     ALOGV("%s: E", __FUNCTION__);
171     Mutex::Autolock l(mLock);
172     status_t res;
173 
174     if (mStatus != STATUS_CLOSED) {
175         ALOGE("%s: Can't connect in state %d", __FUNCTION__, mStatus);
176         return INVALID_OPERATION;
177     }
178 
179     mSensor = new Sensor();
180     mSensor->setSensorListener(this);
181 
182     res = mSensor->startUp();
183     if (res != NO_ERROR) return res;
184 
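    // Create the helpers that model the rest of the pipeline: a readout thread
    // that completes capture requests once the sensor has filled their buffers,
    // and a JPEG compressor for BLOB (still capture) outputs.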
185     mReadoutThread = new ReadoutThread(this);
186     mJpegCompressor = new JpegCompressor();
187 
188     res = mReadoutThread->run("EmuCam3::readoutThread");
189     if (res != NO_ERROR) return res;
190 
191     // Initialize fake 3A
192 
193     mControlMode  = ANDROID_CONTROL_MODE_AUTO;
194     mFacePriority = false;
195     mAeMode       = ANDROID_CONTROL_AE_MODE_ON;
196     mAfMode       = ANDROID_CONTROL_AF_MODE_AUTO;
197     mAwbMode      = ANDROID_CONTROL_AWB_MODE_AUTO;
198     mAeState      = ANDROID_CONTROL_AE_STATE_INACTIVE;
199     mAfState      = ANDROID_CONTROL_AF_STATE_INACTIVE;
200     mAwbState     = ANDROID_CONTROL_AWB_STATE_INACTIVE;
201     mAfTriggerId  = 0;
202     mAeTriggerId  = 0;
203     mAeCurrentExposureTime = kNormalExposureTime;
204     mAeCurrentSensitivity  = kNormalSensitivity;
205 
206     return EmulatedCamera3::connectCamera(device);
207 }
208 
209 status_t EmulatedFakeCamera3::closeCamera() {
210     ALOGV("%s: E", __FUNCTION__);
211     status_t res;
212     {
213         Mutex::Autolock l(mLock);
214         if (mStatus == STATUS_CLOSED) return OK;
215 
216         res = mSensor->shutDown();
217         if (res != NO_ERROR) {
218             ALOGE("%s: Unable to shut down sensor: %d", __FUNCTION__, res);
219             return res;
220         }
221         mSensor.clear();
222 
223         mReadoutThread->requestExit();
224     }
225 
226     mReadoutThread->join();
227 
228     {
229         Mutex::Autolock l(mLock);
230         // Clear out private stream information
231         for (StreamIterator s = mStreams.begin(); s != mStreams.end(); s++) {
232             PrivateStreamInfo *privStream =
233                     static_cast<PrivateStreamInfo*>((*s)->priv);
234             delete privStream;
235             (*s)->priv = NULL;
236         }
237         mStreams.clear();
238         mReadoutThread.clear();
239     }
240 
241     return EmulatedCamera3::closeCamera();
242 }
243 
244 status_t EmulatedFakeCamera3::getCameraInfo(struct camera_info *info) {
245     info->facing = mFacingBack ? CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
246     info->orientation = gEmulatedCameraFactory.getFakeCameraOrientation();
247     return EmulatedCamera3::getCameraInfo(info);
248 }
249 
250 /**
251  * Camera3 interface methods
252  */
253 
254 status_t EmulatedFakeCamera3::configureStreams(
255         camera3_stream_configuration *streamList) {
256     Mutex::Autolock l(mLock);
257     ALOGV("%s: %d streams", __FUNCTION__, streamList->num_streams);
258 
259     if (mStatus != STATUS_OPEN && mStatus != STATUS_READY) {
260         ALOGE("%s: Cannot configure streams in state %d",
261                 __FUNCTION__, mStatus);
262         return NO_INIT;
263     }
264 
265     /**
266      * Sanity-check input list.
267      */
268     if (streamList == NULL) {
269         ALOGE("%s: NULL stream configuration", __FUNCTION__);
270         return BAD_VALUE;
271     }
272 
273     if (streamList->streams == NULL) {
274         ALOGE("%s: NULL stream list", __FUNCTION__);
275         return BAD_VALUE;
276     }
277 
278     if (streamList->num_streams < 1) {
279         ALOGE("%s: Bad number of streams requested: %d", __FUNCTION__,
280                 streamList->num_streams);
281         return BAD_VALUE;
282     }
283 
284     camera3_stream_t *inputStream = NULL;
285     for (size_t i = 0; i < streamList->num_streams; i++) {
286         camera3_stream_t *newStream = streamList->streams[i];
287 
288         if (newStream == NULL) {
289             ALOGE("%s: Stream index %zu was NULL",
290                   __FUNCTION__, i);
291             return BAD_VALUE;
292         }
293 
294         ALOGV("%s: Stream %p (id %zu), type %d, usage 0x%x, format 0x%x",
295                 __FUNCTION__, newStream, i, newStream->stream_type,
296                 newStream->usage,
297                 newStream->format);
298 
299         if (newStream->stream_type == CAMERA3_STREAM_INPUT ||
300             newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
301             if (inputStream != NULL) {
302 
303                 ALOGE("%s: Multiple input streams requested!", __FUNCTION__);
304                 return BAD_VALUE;
305             }
306             inputStream = newStream;
307         }
308 
309         bool validFormat = false;
310         for (size_t f = 0;
311              f < sizeof(kAvailableFormats)/sizeof(kAvailableFormats[0]);
312              f++) {
313             if (newStream->format == kAvailableFormats[f]) {
314                 validFormat = true;
315                 break;
316             }
317         }
318         if (!validFormat) {
319             ALOGE("%s: Unsupported stream format 0x%x requested",
320                     __FUNCTION__, newStream->format);
321             return BAD_VALUE;
322         }
323     }
324     mInputStream = inputStream;
325 
326     /**
327      * Initially mark all existing streams as not alive
328      */
329     for (StreamIterator s = mStreams.begin(); s != mStreams.end(); ++s) {
330         PrivateStreamInfo *privStream =
331                 static_cast<PrivateStreamInfo*>((*s)->priv);
332         privStream->alive = false;
333     }
334 
335     /**
336      * Find new streams and mark still-alive ones
337      */
338     for (size_t i = 0; i < streamList->num_streams; i++) {
339         camera3_stream_t *newStream = streamList->streams[i];
340         if (newStream->priv == NULL) {
341             // New stream, construct info
342             PrivateStreamInfo *privStream = new PrivateStreamInfo();
343             privStream->alive = true;
344             privStream->registered = false;
345 
346             switch (newStream->stream_type) {
347                 case CAMERA3_STREAM_OUTPUT:
348                     newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
349                     break;
350                 case CAMERA3_STREAM_INPUT:
351                     newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
352                     break;
353                 case CAMERA3_STREAM_BIDIRECTIONAL:
354                     newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
355                             GRALLOC_USAGE_HW_CAMERA_WRITE;
356                     break;
357             }
358             newStream->max_buffers = kMaxBufferCount;
359             newStream->priv = privStream;
360             mStreams.push_back(newStream);
361         } else {
362             // Existing stream, mark as still alive.
363             PrivateStreamInfo *privStream =
364                     static_cast<PrivateStreamInfo*>(newStream->priv);
365             privStream->alive = true;
366         }
367     }
368 
369     /**
370      * Reap the dead streams
371      */
372     for (StreamIterator s = mStreams.begin(); s != mStreams.end();) {
373         PrivateStreamInfo *privStream =
374                 static_cast<PrivateStreamInfo*>((*s)->priv);
375         if (!privStream->alive) {
376             (*s)->priv = NULL;
377             delete privStream;
378             s = mStreams.erase(s);
379         } else {
380             ++s;
381         }
382     }
383 
384     /**
385      * Can't reuse settings across configure call
386      */
387     mPrevSettings.clear();
388 
389     return OK;
390 }
391 
392 status_t EmulatedFakeCamera3::registerStreamBuffers(
393         const camera3_stream_buffer_set *bufferSet) {
394     ALOGV("%s: E", __FUNCTION__);
395     Mutex::Autolock l(mLock);
396 
397     /**
398      * Sanity checks
399      */
400 
401     // OK: register streams at any time during configure
402     // (but only once per stream)
403     if (mStatus != STATUS_READY && mStatus != STATUS_ACTIVE) {
404         ALOGE("%s: Cannot register buffers in state %d",
405                 __FUNCTION__, mStatus);
406         return NO_INIT;
407     }
408 
409     if (bufferSet == NULL) {
410         ALOGE("%s: NULL buffer set!", __FUNCTION__);
411         return BAD_VALUE;
412     }
413 
414     StreamIterator s = mStreams.begin();
415     for (; s != mStreams.end(); ++s) {
416         if (bufferSet->stream == *s) break;
417     }
418     if (s == mStreams.end()) {
419         ALOGE("%s: Trying to register buffers for a non-configured stream!",
420                 __FUNCTION__);
421         return BAD_VALUE;
422     }
423 
424     /**
425      * Register the buffers. This doesn't mean anything to the emulator besides
426      * marking them off as registered.
427      */
428 
429     PrivateStreamInfo *privStream =
430             static_cast<PrivateStreamInfo*>((*s)->priv);
431 
432     if (privStream->registered) {
433         ALOGE("%s: Illegal to register buffer more than once", __FUNCTION__);
434         return BAD_VALUE;
435     }
436 
437     privStream->registered = true;
438 
439     return OK;
440 }
441 
442 const camera_metadata_t* EmulatedFakeCamera3::constructDefaultRequestSettings(
443         int type) {
444     ALOGV("%s: E", __FUNCTION__);
445     Mutex::Autolock l(mLock);
446 
447     if (type < 0 || type >= CAMERA2_TEMPLATE_COUNT) {
448         ALOGE("%s: Unknown request settings template: %d",
449                 __FUNCTION__, type);
450         return NULL;
451     }
452 
453     /**
454      * Cache is not just an optimization - pointer returned has to live at
455      * least as long as the camera device instance does.
456      */
457     if (mDefaultTemplates[type] != NULL) {
458         return mDefaultTemplates[type];
459     }
460 
461     CameraMetadata settings;
462 
463     /** android.request */
464 
465     static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
466     settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
467 
468     static const uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
469     settings.update(ANDROID_REQUEST_METADATA_MODE, &metadataMode, 1);
470 
471     static const int32_t id = 0;
472     settings.update(ANDROID_REQUEST_ID, &id, 1);
473 
474     static const int32_t frameCount = 0;
475     settings.update(ANDROID_REQUEST_FRAME_COUNT, &frameCount, 1);
476 
477     /** android.lens */
478 
479     static const float focusDistance = 0;
480     settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focusDistance, 1);
481 
482     static const float aperture = 2.8f;
483     settings.update(ANDROID_LENS_APERTURE, &aperture, 1);
484 
485     static const float focalLength = 5.0f;
486     settings.update(ANDROID_LENS_FOCAL_LENGTH, &focalLength, 1);
487 
488     static const float filterDensity = 0;
489     settings.update(ANDROID_LENS_FILTER_DENSITY, &filterDensity, 1);
490 
491     static const uint8_t opticalStabilizationMode =
492             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
493     settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
494             &opticalStabilizationMode, 1);
495 
496     // FOCUS_RANGE set only in frame
497 
498     /** android.sensor */
499 
500     static const int64_t exposureTime = 10 * MSEC;
501     settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &exposureTime, 1);
502 
503     static const int64_t frameDuration = 33333333L; // 1/30 s
504     settings.update(ANDROID_SENSOR_FRAME_DURATION, &frameDuration, 1);
505 
506     static const int32_t sensitivity = 100;
507     settings.update(ANDROID_SENSOR_SENSITIVITY, &sensitivity, 1);
508 
509     // TIMESTAMP set only in frame
510 
511     /** android.flash */
512 
513     static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
514     settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
515 
516     static const uint8_t flashPower = 10;
517     settings.update(ANDROID_FLASH_FIRING_POWER, &flashPower, 1);
518 
519     static const int64_t firingTime = 0;
520     settings.update(ANDROID_FLASH_FIRING_TIME, &firingTime, 1);
521 
522     /** Processing block modes */
523     uint8_t hotPixelMode = 0;
524     uint8_t demosaicMode = 0;
525     uint8_t noiseMode = 0;
526     uint8_t shadingMode = 0;
527     uint8_t colorMode = 0;
528     uint8_t tonemapMode = 0;
529     uint8_t edgeMode = 0;
530     switch (type) {
531       case CAMERA2_TEMPLATE_STILL_CAPTURE:
532         // fall-through
533       case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
534         // fall-through
535       case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
536         hotPixelMode = ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY;
537         demosaicMode = ANDROID_DEMOSAIC_MODE_HIGH_QUALITY;
538         noiseMode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
539         shadingMode = ANDROID_SHADING_MODE_HIGH_QUALITY;
540         colorMode = ANDROID_COLOR_CORRECTION_MODE_HIGH_QUALITY;
541         tonemapMode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
542         edgeMode = ANDROID_EDGE_MODE_HIGH_QUALITY;
543         break;
544       case CAMERA2_TEMPLATE_PREVIEW:
545         // fall-through
546       case CAMERA2_TEMPLATE_VIDEO_RECORD:
547         // fall-through
548       default:
549         hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
550         demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
551         noiseMode = ANDROID_NOISE_REDUCTION_MODE_FAST;
552         shadingMode = ANDROID_SHADING_MODE_FAST;
553         colorMode = ANDROID_COLOR_CORRECTION_MODE_FAST;
554         tonemapMode = ANDROID_TONEMAP_MODE_FAST;
555         edgeMode = ANDROID_EDGE_MODE_FAST;
556         break;
557     }
558     settings.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
559     settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
560     settings.update(ANDROID_NOISE_REDUCTION_MODE, &noiseMode, 1);
561     settings.update(ANDROID_SHADING_MODE, &shadingMode, 1);
562     settings.update(ANDROID_COLOR_CORRECTION_MODE, &colorMode, 1);
563     settings.update(ANDROID_TONEMAP_MODE, &tonemapMode, 1);
564     settings.update(ANDROID_EDGE_MODE, &edgeMode, 1);
565 
566     /** android.noise */
567     static const uint8_t noiseStrength = 5;
568     settings.update(ANDROID_NOISE_REDUCTION_STRENGTH, &noiseStrength, 1);
569 
570     /** android.color */
571     static const float colorTransform[9] = {
572         1.0f, 0.f, 0.f,
573         0.f, 1.f, 0.f,
574         0.f, 0.f, 1.f
575     };
576     settings.update(ANDROID_COLOR_CORRECTION_TRANSFORM, colorTransform, 9);
577 
578     /** android.tonemap */
579     static const float tonemapCurve[4] = {
580         0.f, 0.f,
581         1.f, 1.f
582     };
583     settings.update(ANDROID_TONEMAP_CURVE_RED, tonemapCurve, 4);
584     settings.update(ANDROID_TONEMAP_CURVE_GREEN, tonemapCurve, 4);
585     settings.update(ANDROID_TONEMAP_CURVE_BLUE, tonemapCurve, 4);
586 
587     /** android.edge */
588     static const uint8_t edgeStrength = 5;
589     settings.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
590 
591     /** android.scaler */
592     static const int32_t cropRegion[4] = {
593         0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1]
594     };
595     settings.update(ANDROID_SCALER_CROP_REGION, cropRegion, 4);
596 
597     /** android.jpeg */
598     static const uint8_t jpegQuality = 80;
599     settings.update(ANDROID_JPEG_QUALITY, &jpegQuality, 1);
600 
601     static const int32_t thumbnailSize[2] = {
602         640, 480
603     };
604     settings.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);
605 
606     static const uint8_t thumbnailQuality = 80;
607     settings.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &thumbnailQuality, 1);
608 
609     static const double gpsCoordinates[2] = {
610         0, 0
611     };
612     settings.update(ANDROID_JPEG_GPS_COORDINATES, gpsCoordinates, 2);
613 
614     static const uint8_t gpsProcessingMethod[32] = "None";
615     settings.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, gpsProcessingMethod, 32);
616 
617     static const int64_t gpsTimestamp = 0;
618     settings.update(ANDROID_JPEG_GPS_TIMESTAMP, &gpsTimestamp, 1);
619 
620     static const int32_t jpegOrientation = 0;
621     settings.update(ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);
622 
623     /** android.stats */
624 
625     static const uint8_t faceDetectMode =
626         ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
627     settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
628 
629     static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
630     settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
631 
632     static const uint8_t sharpnessMapMode =
633         ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
634     settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
635 
636     // faceRectangles, faceScores, faceLandmarks, faceIds, histogram,
637     // sharpnessMap only in frames
638 
639     /** android.control */
640 
641     uint8_t controlIntent = 0;
642     switch (type) {
643       case CAMERA2_TEMPLATE_PREVIEW:
644         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
645         break;
646       case CAMERA2_TEMPLATE_STILL_CAPTURE:
647         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
648         break;
649       case CAMERA2_TEMPLATE_VIDEO_RECORD:
650         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
651         break;
652       case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
653         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
654         break;
655       case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
656         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
657         break;
658       default:
659         controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
660         break;
661     }
662     settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
663 
664     static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
665     settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
666 
667     static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
668     settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
669 
670     static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
671     settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
672 
673     static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
674     settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
675 
676     static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
677     settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
678 
679     static const int32_t controlRegions[5] = {
680         0, 0, (int32_t)Sensor::kResolution[0], (int32_t)Sensor::kResolution[1],
681         1000
682     };
683     settings.update(ANDROID_CONTROL_AE_REGIONS, controlRegions, 5);
684 
685     static const int32_t aeExpCompensation = 0;
686     settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &aeExpCompensation, 1);
687 
688     static const int32_t aeTargetFpsRange[2] = {
689         10, 30
690     };
691     settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, aeTargetFpsRange, 2);
692 
693     static const uint8_t aeAntibandingMode =
694             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
695     settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &aeAntibandingMode, 1);
696 
697     static const uint8_t awbMode =
698             ANDROID_CONTROL_AWB_MODE_AUTO;
699     settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
700 
701     static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
702     settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
703 
704     settings.update(ANDROID_CONTROL_AWB_REGIONS, controlRegions, 5);
705 
706     uint8_t afMode = 0;
707     switch (type) {
708       case CAMERA2_TEMPLATE_PREVIEW:
709         afMode = ANDROID_CONTROL_AF_MODE_AUTO;
710         break;
711       case CAMERA2_TEMPLATE_STILL_CAPTURE:
712         afMode = ANDROID_CONTROL_AF_MODE_AUTO;
713         break;
714       case CAMERA2_TEMPLATE_VIDEO_RECORD:
715         afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
716         break;
717       case CAMERA2_TEMPLATE_VIDEO_SNAPSHOT:
718         afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
719         break;
720       case CAMERA2_TEMPLATE_ZERO_SHUTTER_LAG:
721         afMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
722         break;
723       default:
724         afMode = ANDROID_CONTROL_AF_MODE_AUTO;
725         break;
726     }
727     settings.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
728 
729     settings.update(ANDROID_CONTROL_AF_REGIONS, controlRegions, 5);
730 
731     static const uint8_t vstabMode =
732         ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
733     settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vstabMode, 1);
734 
735     // aeState, awbState, afState only in frame
736 
737     mDefaultTemplates[type] = settings.release();
738 
739     return mDefaultTemplates[type];
740 }
741 
742 status_t EmulatedFakeCamera3::processCaptureRequest(
743         camera3_capture_request *request) {
744 
745     Mutex::Autolock l(mLock);
746     status_t res;
747 
748     /** Validation */
749 
750     if (mStatus < STATUS_READY) {
751         ALOGE("%s: Can't submit capture requests in state %d", __FUNCTION__,
752                 mStatus);
753         return INVALID_OPERATION;
754     }
755 
756     if (request == NULL) {
757         ALOGE("%s: NULL request!", __FUNCTION__);
758         return BAD_VALUE;
759     }
760 
761     uint32_t frameNumber = request->frame_number;
762 
763     if (request->settings == NULL && mPrevSettings.isEmpty()) {
764         ALOGE("%s: Request %d: NULL settings for first request after "
765                 "configureStreams()", __FUNCTION__, frameNumber);
766         return BAD_VALUE;
767     }
768 
769     if (request->input_buffer != NULL &&
770             request->input_buffer->stream != mInputStream) {
771         ALOGE("%s: Request %d: Input buffer not from input stream!",
772                 __FUNCTION__, frameNumber);
773         ALOGV("%s: Bad stream %p, expected: %p",
774               __FUNCTION__, request->input_buffer->stream,
775               mInputStream);
776         ALOGV("%s: Bad stream type %d, expected stream type %d",
777               __FUNCTION__, request->input_buffer->stream->stream_type,
778               mInputStream ? mInputStream->stream_type : -1);
779 
780         return BAD_VALUE;
781     }
782 
783     if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
784         ALOGE("%s: Request %d: No output buffers provided!",
785                 __FUNCTION__, frameNumber);
786         return BAD_VALUE;
787     }
788 
789     // Validate all buffers, starting with input buffer if it's given
790 
791     ssize_t idx;
792     const camera3_stream_buffer_t *b;
793     if (request->input_buffer != NULL) {
794         idx = -1;
795         b = request->input_buffer;
796     } else {
797         idx = 0;
798         b = request->output_buffers;
799     }
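    // idx == -1 refers to the optional input buffer; 0 onward indexes the
    // output buffers.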
800     do {
801         PrivateStreamInfo *priv =
802                 static_cast<PrivateStreamInfo*>(b->stream->priv);
803         if (priv == NULL) {
804             ALOGE("%s: Request %d: Buffer %zd: Unconfigured stream!",
805                     __FUNCTION__, frameNumber, idx);
806             return BAD_VALUE;
807         }
808         if (!priv->alive || !priv->registered) {
809             ALOGE("%s: Request %d: Buffer %zd: Unregistered or dead stream!",
810                     __FUNCTION__, frameNumber, idx);
811             return BAD_VALUE;
812         }
813         if (b->status != CAMERA3_BUFFER_STATUS_OK) {
814             ALOGE("%s: Request %d: Buffer %zd: Status not OK!",
815                     __FUNCTION__, frameNumber, idx);
816             return BAD_VALUE;
817         }
818         if (b->release_fence != -1) {
819             ALOGE("%s: Request %d: Buffer %zd: Has a release fence!",
820                     __FUNCTION__, frameNumber, idx);
821             return BAD_VALUE;
822         }
823         if (b->buffer == NULL) {
824             ALOGE("%s: Request %d: Buffer %zd: NULL buffer handle!",
825                     __FUNCTION__, frameNumber, idx);
826             return BAD_VALUE;
827         }
828         idx++;
829         b = &(request->output_buffers[idx]);
830     } while (idx < (ssize_t)request->num_output_buffers);
831 
832     // TODO: Validate settings parameters
833 
834     /**
835      * Start processing this request
836      */
837 
838     mStatus = STATUS_ACTIVE;
839 
840     CameraMetadata settings;
841 
842     if (request->settings == NULL) {
843         settings.acquire(mPrevSettings);
844     } else {
845         settings = request->settings;
846     }
847 
848     res = process3A(settings);
849     if (res != OK) {
850         return res;
851     }
852 
853     // TODO: Handle reprocessing
854 
855     /**
856      * Get ready for sensor config
857      */
858 
859     nsecs_t  exposureTime;
860     nsecs_t  frameDuration;
861     uint32_t sensitivity;
862     bool     needJpeg = false;
863 
864     exposureTime = settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
865     frameDuration = settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
866     sensitivity = settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
867 
868     Buffers *sensorBuffers = new Buffers();
869     HalBufferVector *buffers = new HalBufferVector();
870 
871     sensorBuffers->setCapacity(request->num_output_buffers);
872     buffers->setCapacity(request->num_output_buffers);
873 
874     // Process all the buffers we got for output, constructing internal buffer
875     // structures for them, and lock them for writing.
876     for (size_t i = 0; i < request->num_output_buffers; i++) {
877         const camera3_stream_buffer &srcBuf = request->output_buffers[i];
878         const cb_handle_t *privBuffer =
879                 static_cast<const cb_handle_t*>(*srcBuf.buffer);
880         StreamBuffer destBuf;
881         destBuf.streamId = kGenericStreamId;
882         destBuf.width    = srcBuf.stream->width;
883         destBuf.height   = srcBuf.stream->height;
884         destBuf.format   = privBuffer->format; // Use real private format
885         destBuf.stride   = srcBuf.stream->width; // TODO: query from gralloc
886         destBuf.buffer   = srcBuf.buffer;
887 
888         if (destBuf.format == HAL_PIXEL_FORMAT_BLOB) {
889             needJpeg = true;
890         }
891 
892         // Wait on fence
893         sp<Fence> bufferAcquireFence = new Fence(srcBuf.acquire_fence);
894         res = bufferAcquireFence->wait(kFenceTimeoutMs);
895         if (res == TIMED_OUT) {
896             ALOGE("%s: Request %d: Buffer %zu: Fence timed out after %d ms",
897                     __FUNCTION__, frameNumber, i, kFenceTimeoutMs);
898         }
899         if (res == OK) {
900             // Lock buffer for writing
901             const Rect rect(destBuf.width, destBuf.height);
902             if (srcBuf.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
903                 if (privBuffer->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
904                     android_ycbcr ycbcr = android_ycbcr();
905                     res = GraphicBufferMapper::get().lockYCbCr(
906                         *(destBuf.buffer),
907                         GRALLOC_USAGE_HW_CAMERA_WRITE, rect,
908                         &ycbcr);
909                     // This is only valid because we know that emulator's
910                     // YCbCr_420_888 is really contiguous NV21 under the hood
911                     destBuf.img = static_cast<uint8_t*>(ycbcr.y);
912                 } else {
913                     ALOGE("Unexpected private format for flexible YUV: 0x%x",
914                             privBuffer->format);
915                     res = INVALID_OPERATION;
916                 }
917             } else {
918                 res = GraphicBufferMapper::get().lock(*(destBuf.buffer),
919                         GRALLOC_USAGE_HW_CAMERA_WRITE, rect,
920                         (void**)&(destBuf.img));
921             }
922             if (res != OK) {
923                 ALOGE("%s: Request %d: Buffer %zu: Unable to lock buffer",
924                         __FUNCTION__, frameNumber, i);
925             }
926         }
927 
928         if (res != OK) {
929             // Either waiting or locking failed. Unlock locked buffers and bail
930             // out.
931             for (size_t j = 0; j < i; j++) {
932                 GraphicBufferMapper::get().unlock(
933                         *(request->output_buffers[j].buffer));
934             }
935             return NO_INIT;
936         }
937 
938         sensorBuffers->push_back(destBuf);
939         buffers->push_back(srcBuf);
940     }
941 
942     /**
943      * Wait for JPEG compressor to not be busy, if needed
944      */
945     if (needJpeg) {
946         bool ready = mJpegCompressor->waitForDone(kFenceTimeoutMs);
947         if (!ready) {
948             ALOGE("%s: Timeout waiting for JPEG compression to complete!",
949                     __FUNCTION__);
950             return NO_INIT;
951         }
952     }
953 
954     /**
955      * Wait until the in-flight queue has room
956      */
957     res = mReadoutThread->waitForReadout();
958     if (res != OK) {
959         ALOGE("%s: Timeout waiting for previous requests to complete!",
960                 __FUNCTION__);
961         return NO_INIT;
962     }
963 
964     /**
965      * Wait until sensor's ready. This waits for lengthy amounts of time with
966      * mLock held, but the interface spec is that no other calls may be made to
967      * the HAL by the framework while process_capture_request is happening.
968      */
969     int syncTimeoutCount = 0;
970     while(!mSensor->waitForVSync(kSyncWaitTimeout)) {
971         if (mStatus == STATUS_ERROR) {
972             return NO_INIT;
973         }
974         if (syncTimeoutCount == kMaxSyncTimeoutCount) {
975             ALOGE("%s: Request %d: Sensor sync timed out after %" PRId64 " ms",
976                     __FUNCTION__, frameNumber,
977                     kSyncWaitTimeout * kMaxSyncTimeoutCount / 1000000);
978             return NO_INIT;
979         }
980         syncTimeoutCount++;
981     }
982 
983     /**
984      * Configure sensor and queue up the request to the readout thread
985      */
986     mSensor->setExposureTime(exposureTime);
987     mSensor->setFrameDuration(frameDuration);
988     mSensor->setSensitivity(sensitivity);
989     mSensor->setDestinationBuffers(sensorBuffers);
990     mSensor->setFrameNumber(request->frame_number);
991 
992     ReadoutThread::Request r;
993     r.frameNumber = request->frame_number;
994     r.settings = settings;
995     r.sensorBuffers = sensorBuffers;
996     r.buffers = buffers;
997 
998     mReadoutThread->queueCaptureRequest(r);
999     ALOGVV("%s: Queued frame %d", __FUNCTION__, request->frame_number);
1000 
1001     // Cache the settings for next time
1002     mPrevSettings.acquire(settings);
1003 
1004     return OK;
1005 }
1006 
1007 /** Debug methods */
1008 
1009 void EmulatedFakeCamera3::dump(int fd) {
1010 
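    // The fake camera keeps no additional debug state, so there is nothing to dump.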
1011 }
1012 
1013 /** Tag query methods */
1014 const char* EmulatedFakeCamera3::getVendorSectionName(uint32_t tag) {
1015     return NULL;
1016 }
1017 
1018 const char* EmulatedFakeCamera3::getVendorTagName(uint32_t tag) {
1019     return NULL;
1020 }
1021 
1022 int EmulatedFakeCamera3::getVendorTagType(uint32_t tag) {
1023     return 0;
1024 }
1025 
1026 /**
1027  * Private methods
1028  */
1029 
1030 status_t EmulatedFakeCamera3::constructStaticInfo() {
1031 
1032     CameraMetadata info;
1033     // android.lens
1034 
1035     // 5 cm min focus distance for back camera, infinity (fixed focus) for front
1036     const float minFocusDistance = mFacingBack ? 1.0/0.05 : 0.0;
1037     info.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
1038             &minFocusDistance, 1);
1039 
1040     // 5 m hyperfocal distance for back camera, infinity (fixed focus) for front
1041     const float hyperFocalDistance = mFacingBack ? 1.0/5.0 : 0.0;
1042     info.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
1043             &hyperFocalDistance, 1);
1044 
1045     static const float focalLength = 3.30f; // mm
1046     info.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
1047             &focalLength, 1);
1048     static const float aperture = 2.8f;
1049     info.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
1050             &aperture, 1);
1051     static const float filterDensity = 0;
1052     info.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
1053             &filterDensity, 1);
1054     static const uint8_t availableOpticalStabilization =
1055             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
1056     info.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
1057             &availableOpticalStabilization, 1);
1058 
1059     static const int32_t lensShadingMapSize[] = {1, 1};
1060     info.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, lensShadingMapSize,
1061             sizeof(lensShadingMapSize)/sizeof(int32_t));
1062 
1063     uint8_t lensFacing = mFacingBack ?
1064             ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
1065     info.update(ANDROID_LENS_FACING, &lensFacing, 1);
1066 
1067     float lensPosition[3];
1068     if (mFacingBack) {
1069         // Back-facing camera is center-top on device
1070         lensPosition[0] = 0;
1071         lensPosition[1] = 20;
1072         lensPosition[2] = -5;
1073     } else {
1074         // Front-facing camera is center-right on device
1075         lensPosition[0] = 20;
1076         lensPosition[1] = 20;
1077         lensPosition[2] = 0;
1078     }
1079     info.update(ANDROID_LENS_POSITION, lensPosition, sizeof(lensPosition)/
1080             sizeof(float));
1081 
1082     // android.sensor
1083 
1084     info.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
1085             Sensor::kExposureTimeRange, 2);
1086 
1087     info.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
1088             &Sensor::kFrameDurationRange[1], 1);
1089 
1090     info.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
1091             Sensor::kSensitivityRange,
1092             sizeof(Sensor::kSensitivityRange)
1093             /sizeof(int32_t));
1094 
1095     info.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
1096             &Sensor::kColorFilterArrangement, 1);
1097 
1098     static const float sensorPhysicalSize[2] = {3.20f, 2.40f}; // mm
1099     info.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
1100             sensorPhysicalSize, 2);
1101 
1102     info.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
1103             (int32_t*)Sensor::kResolution, 2);
1104 
1105     info.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
1106             (int32_t*)Sensor::kResolution, 2);
1107 
1108     info.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
1109             (int32_t*)&Sensor::kMaxRawValue, 1);
1110 
1111     static const int32_t blackLevelPattern[4] = {
1112             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel,
1113             (int32_t)Sensor::kBlackLevel, (int32_t)Sensor::kBlackLevel
1114     };
1115     info.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
1116             blackLevelPattern, sizeof(blackLevelPattern)/sizeof(int32_t));
1117 
1118     static const int32_t orientation = 0; // unrotated (0 degrees)
1119     info.update(ANDROID_SENSOR_ORIENTATION, &orientation, 1);
1120 
1121     //TODO: sensor color calibration fields
1122 
1123     // android.flash
1124     static const uint8_t flashAvailable = 0;
1125     info.update(ANDROID_FLASH_INFO_AVAILABLE, &flashAvailable, 1);
1126 
1127     static const int64_t flashChargeDuration = 0;
1128     info.update(ANDROID_FLASH_INFO_CHARGE_DURATION, &flashChargeDuration, 1);
1129 
1130     // android.tonemap
1131 
1132     static const int32_t tonemapCurvePoints = 128;
1133     info.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, &tonemapCurvePoints, 1);
1134 
1135     // android.scaler
1136 
1137     info.update(ANDROID_SCALER_AVAILABLE_FORMATS,
1138             kAvailableFormats,
1139             sizeof(kAvailableFormats)/sizeof(int32_t));
1140 
1141     info.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
1142             (int32_t*)kAvailableRawSizes,
1143             sizeof(kAvailableRawSizes)/sizeof(uint32_t));
1144 
1145     info.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
1146             (int64_t*)kAvailableRawMinDurations,
1147             sizeof(kAvailableRawMinDurations)/sizeof(uint64_t));
1148 
1149     if (mFacingBack) {
1150         info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1151                 (int32_t*)kAvailableProcessedSizesBack,
1152                 sizeof(kAvailableProcessedSizesBack)/sizeof(uint32_t));
1153     } else {
1154         info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
1155                 (int32_t*)kAvailableProcessedSizesFront,
1156                 sizeof(kAvailableProcessedSizesFront)/sizeof(uint32_t));
1157     }
1158 
1159     info.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
1160             (int64_t*)kAvailableProcessedMinDurations,
1161             sizeof(kAvailableProcessedMinDurations)/sizeof(uint64_t));
1162 
1163     if (mFacingBack) {
1164         info.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1165                 (int32_t*)kAvailableJpegSizesBack,
1166                 sizeof(kAvailableJpegSizesBack)/sizeof(uint32_t));
1167     } else {
1168         info.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
1169                 (int32_t*)kAvailableJpegSizesFront,
1170                 sizeof(kAvailableJpegSizesFront)/sizeof(uint32_t));
1171     }
1172 
1173     info.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
1174             (int64_t*)kAvailableJpegMinDurations,
1175             sizeof(kAvailableJpegMinDurations)/sizeof(uint64_t));
1176 
1177     static const float maxZoom = 10;
1178     info.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
1179             &maxZoom, 1);
1180 
1181     // android.jpeg
1182 
1183     static const int32_t jpegThumbnailSizes[] = {
1184             0, 0,
1185             160, 120,
1186             320, 240
1187      };
1188     info.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
1189             jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
1190 
1191     static const int32_t jpegMaxSize = JpegCompressor::kMaxJpegSize;
1192     info.update(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
1193 
1194     // android.stats
1195 
1196     static const uint8_t availableFaceDetectModes[] = {
1197         ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
1198         ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE,
1199         ANDROID_STATISTICS_FACE_DETECT_MODE_FULL
1200     };
1201 
1202     info.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
1203             availableFaceDetectModes,
1204             sizeof(availableFaceDetectModes));
1205 
1206     static const int32_t maxFaceCount = 8;
1207     info.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
1208             &maxFaceCount, 1);
1209 
1210     static const int32_t histogramSize = 64;
1211     info.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
1212             &histogramSize, 1);
1213 
1214     static const int32_t maxHistogramCount = 1000;
1215     info.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
1216             &maxHistogramCount, 1);
1217 
1218     static const int32_t sharpnessMapSize[2] = {64, 64};
1219     info.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
1220             sharpnessMapSize, sizeof(sharpnessMapSize)/sizeof(int32_t));
1221 
1222     static const int32_t maxSharpnessMapValue = 1000;
1223     info.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
1224             &maxSharpnessMapValue, 1);
1225 
1226     // android.control
1227 
1228     static const uint8_t availableSceneModes[] = {
1229             ANDROID_CONTROL_SCENE_MODE_DISABLED
1230     };
1231     info.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
1232             availableSceneModes, sizeof(availableSceneModes));
1233 
1234     static const uint8_t availableEffects[] = {
1235             ANDROID_CONTROL_EFFECT_MODE_OFF
1236     };
1237     info.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
1238             availableEffects, sizeof(availableEffects));
1239 
1240     static const int32_t max3aRegions[] = {/*AE*/ 0,/*AWB*/ 0,/*AF*/ 0};
1241     info.update(ANDROID_CONTROL_MAX_REGIONS,
1242             max3aRegions, sizeof(max3aRegions)/sizeof(max3aRegions[0]));
1243 
1244     static const uint8_t availableAeModes[] = {
1245             ANDROID_CONTROL_AE_MODE_OFF,
1246             ANDROID_CONTROL_AE_MODE_ON
1247     };
1248     info.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
1249             availableAeModes, sizeof(availableAeModes));
1250 
1251     static const camera_metadata_rational exposureCompensationStep = {
1252             1, 3
1253     };
1254     info.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
1255             &exposureCompensationStep, 1);
1256 
1257     int32_t exposureCompensationRange[] = {-9, 9};
1258     info.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
1259             exposureCompensationRange,
1260             sizeof(exposureCompensationRange)/sizeof(int32_t));
1261 
1262     static const int32_t availableTargetFpsRanges[] = {
1263             5, 30, 15, 30
1264     };
1265     info.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
1266             availableTargetFpsRanges,
1267             sizeof(availableTargetFpsRanges)/sizeof(int32_t));
1268 
1269     static const uint8_t availableAntibandingModes[] = {
1270             ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
1271             ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO
1272     };
1273     info.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
1274             availableAntibandingModes, sizeof(availableAntibandingModes));
1275 
1276     static const uint8_t availableAwbModes[] = {
1277             ANDROID_CONTROL_AWB_MODE_OFF,
1278             ANDROID_CONTROL_AWB_MODE_AUTO,
1279             ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
1280             ANDROID_CONTROL_AWB_MODE_FLUORESCENT,
1281             ANDROID_CONTROL_AWB_MODE_DAYLIGHT,
1282             ANDROID_CONTROL_AWB_MODE_SHADE
1283     };
1284     info.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
1285             availableAwbModes, sizeof(availableAwbModes));
1286 
1287     static const uint8_t availableAfModesBack[] = {
1288             ANDROID_CONTROL_AF_MODE_OFF,
1289             ANDROID_CONTROL_AF_MODE_AUTO,
1290             ANDROID_CONTROL_AF_MODE_MACRO,
1291             ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,
1292             ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE
1293     };
1294 
1295     static const uint8_t availableAfModesFront[] = {
1296             ANDROID_CONTROL_AF_MODE_OFF
1297     };
1298 
1299     if (mFacingBack) {
1300         info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1301                     availableAfModesBack, sizeof(availableAfModesBack));
1302     } else {
1303         info.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
1304                     availableAfModesFront, sizeof(availableAfModesFront));
1305     }
1306 
1307     static const uint8_t availableVstabModes[] = {
1308             ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF
1309     };
1310     info.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
1311             availableVstabModes, sizeof(availableVstabModes));
1312 
1313     // android.info
1314     const uint8_t supportedHardwareLevel =
1315         mFullMode ? ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL :
1316                     ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
1317     info.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
1318                 &supportedHardwareLevel,
1319                 /*count*/1);
1320 
1321     mCameraInfo = info.release();
1322 
1323     return OK;
1324 }
1325 
1326 status_t EmulatedFakeCamera3::process3A(CameraMetadata &settings) {
1327     /**
1328      * Extract top-level 3A controls
1329      */
1330     status_t res;
1331 
1332     bool facePriority = false;
1333 
1334     camera_metadata_entry e;
1335 
1336     e = settings.find(ANDROID_CONTROL_MODE);
1337     if (e.count == 0) {
1338         ALOGE("%s: No control mode entry!", __FUNCTION__);
1339         return BAD_VALUE;
1340     }
1341     uint8_t controlMode = e.data.u8[0];
1342 
1343     e = settings.find(ANDROID_CONTROL_SCENE_MODE);
1344     if (e.count == 0) {
1345         ALOGE("%s: No scene mode entry!", __FUNCTION__);
1346         return BAD_VALUE;
1347     }
1348     uint8_t sceneMode = e.data.u8[0];
1349 
1350     if (controlMode == ANDROID_CONTROL_MODE_OFF) {
1351         mAeState  = ANDROID_CONTROL_AE_STATE_INACTIVE;
1352         mAfState  = ANDROID_CONTROL_AF_STATE_INACTIVE;
1353         mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1354         update3A(settings);
1355         return OK;
1356     } else if (controlMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
1357         switch(sceneMode) {
1358             case ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY:
1359                 mFacePriority = true;
1360                 break;
1361             default:
1362                 ALOGE("%s: Emulator doesn't support scene mode %d",
1363                         __FUNCTION__, sceneMode);
1364                 return BAD_VALUE;
1365         }
1366     } else {
1367         mFacePriority = false;
1368     }
1369 
1370     // controlMode == AUTO or sceneMode == FACE_PRIORITY
1371     // Process individual 3A controls
1372 
1373     res = doFakeAE(settings);
1374     if (res != OK) return res;
1375 
1376     res = doFakeAF(settings);
1377     if (res != OK) return res;
1378 
1379     res = doFakeAWB(settings);
1380     if (res != OK) return res;
1381 
1382     update3A(settings);
1383     return OK;
1384 }
1385 
1386 status_t EmulatedFakeCamera3::doFakeAE(CameraMetadata &settings) {
1387     camera_metadata_entry e;
1388 
1389     e = settings.find(ANDROID_CONTROL_AE_MODE);
1390     if (e.count == 0) {
1391         ALOGE("%s: No AE mode entry!", __FUNCTION__);
1392         return BAD_VALUE;
1393     }
1394     uint8_t aeMode = e.data.u8[0];
1395 
1396     switch (aeMode) {
1397         case ANDROID_CONTROL_AE_MODE_OFF:
1398             // AE is OFF
1399             mAeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
1400             return OK;
1401         case ANDROID_CONTROL_AE_MODE_ON:
1402             // OK for AUTO modes
1403             break;
1404         default:
1405             ALOGE("%s: Emulator doesn't support AE mode %d",
1406                     __FUNCTION__, aeMode);
1407             return BAD_VALUE;
1408     }
1409 
1410     e = settings.find(ANDROID_CONTROL_AE_LOCK);
1411     if (e.count == 0) {
1412         ALOGE("%s: No AE lock entry!", __FUNCTION__);
1413         return BAD_VALUE;
1414     }
1415     bool aeLocked = (e.data.u8[0] == ANDROID_CONTROL_AE_LOCK_ON);
1416 
1417     e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER);
1418     bool precaptureTrigger = false;
1419     if (e.count != 0) {
1420         precaptureTrigger =
1421                 (e.data.u8[0] == ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_START);
1422     }
1423 
1424     if (precaptureTrigger) {
1425         ALOGV("%s: Pre capture trigger = %d", __FUNCTION__, precaptureTrigger);
1426     } else if (e.count > 0) {
1427         ALOGV("%s: Pre capture trigger was present? %zu",
1428               __FUNCTION__,
1429               e.count);
1430     }
1431 
1432     // If we have an aePrecaptureTrigger, aePrecaptureId should be set too
1433     if (e.count != 0) {
1434         e = settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID);
1435 
1436         if (e.count == 0) {
1437             ALOGE("%s: When android.control.aePrecaptureTrigger is set "
1438                   "in the request, aePrecaptureId needs to be set as well",
1439                   __FUNCTION__);
1440             return BAD_VALUE;
1441         }
1442 
1443         mAeTriggerId = e.data.i32[0];
1444     }
1445 
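    // Simple AE simulation: a precapture sequence converges exposure toward the
    // target, while the normal path occasionally wanders to a new target.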
1446     if (precaptureTrigger || mAeState == ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
1447         // Run precapture sequence
1448         if (mAeState != ANDROID_CONTROL_AE_STATE_PRECAPTURE) {
1449             mAeCounter = 0;
1450         }
1451 
1452         if (mFacePriority) {
1453             mAeTargetExposureTime = kFacePriorityExposureTime;
1454         } else {
1455             mAeTargetExposureTime = kNormalExposureTime;
1456         }
1457 
1458         if (mAeCounter > kPrecaptureMinFrames &&
1459                 (mAeTargetExposureTime - mAeCurrentExposureTime) <
1460                 mAeTargetExposureTime / 10) {
1461             // Done with precapture
1462             mAeCounter = 0;
1463             mAeState = aeLocked ? ANDROID_CONTROL_AE_STATE_LOCKED :
1464                     ANDROID_CONTROL_AE_STATE_CONVERGED;
1465         } else {
1466             // Converge some more
1467             mAeCurrentExposureTime +=
1468                     (mAeTargetExposureTime - mAeCurrentExposureTime) *
1469                     kExposureTrackRate;
1470             mAeCounter++;
1471             mAeState = ANDROID_CONTROL_AE_STATE_PRECAPTURE;
1472         }
1473 
1474     } else if (!aeLocked) {
1475         // Run standard occasional AE scan
1476         switch (mAeState) {
1477             case ANDROID_CONTROL_AE_STATE_CONVERGED:
1478             case ANDROID_CONTROL_AE_STATE_INACTIVE:
1479                 mAeCounter++;
1480                 if (mAeCounter > kStableAeMaxFrames) {
1481                     mAeTargetExposureTime =
1482                             mFacePriority ? kFacePriorityExposureTime :
1483                             kNormalExposureTime;
1484                     float exposureStep = ((double)rand() / RAND_MAX) *
1485                             (kExposureWanderMax - kExposureWanderMin) +
1486                             kExposureWanderMin;
1487                     mAeTargetExposureTime *= std::pow(2, exposureStep);
1488                     mAeState = ANDROID_CONTROL_AE_STATE_SEARCHING;
1489                 }
1490                 break;
1491             case ANDROID_CONTROL_AE_STATE_SEARCHING:
1492                 mAeCurrentExposureTime +=
1493                         (mAeTargetExposureTime - mAeCurrentExposureTime) *
1494                         kExposureTrackRate;
1495                 if (abs(mAeTargetExposureTime - mAeCurrentExposureTime) <
1496                         mAeTargetExposureTime / 10) {
1497                     // Close enough
1498                     mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
1499                     mAeCounter = 0;
1500                 }
1501                 break;
1502             case ANDROID_CONTROL_AE_STATE_LOCKED:
1503                 mAeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
1504                 mAeCounter = 0;
1505                 break;
1506             default:
1507                 ALOGE("%s: Emulator in unexpected AE state %d",
1508                         __FUNCTION__, mAeState);
1509                 return INVALID_OPERATION;
1510         }
1511     } else {
1512         // AE is locked
1513         mAeState = ANDROID_CONTROL_AE_STATE_LOCKED;
1514     }
1515 
1516     return OK;
1517 }
1518 
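/*
 * Both AE paths above converge with the same first-order filter,
 *
 *     exposure += (target - exposure) * kExposureTrackRate
 *
 * so after n frames the remaining error is (1 - kExposureTrackRate)^n of the
 * initial gap; the "within 10% of target" checks therefore complete in a
 * bounded, rate-dependent number of frames.
 */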
1519 status_t EmulatedFakeCamera3::doFakeAF(CameraMetadata &settings) {
1520     camera_metadata_entry e;
1521 
1522     e = settings.find(ANDROID_CONTROL_AF_MODE);
1523     if (e.count == 0) {
1524         ALOGE("%s: No AF mode entry!", __FUNCTION__);
1525         return BAD_VALUE;
1526     }
1527     uint8_t afMode = e.data.u8[0];
1528 
1529     e = settings.find(ANDROID_CONTROL_AF_TRIGGER);
1530     typedef camera_metadata_enum_android_control_af_trigger af_trigger_t;
1531     af_trigger_t afTrigger;
1532     // If we have an afTrigger, afTriggerId should be set too
1533     if (e.count != 0) {
1534         afTrigger = static_cast<af_trigger_t>(e.data.u8[0]);
1535 
1536         e = settings.find(ANDROID_CONTROL_AF_TRIGGER_ID);
1537 
1538         if (e.count == 0) {
1539             ALOGE("%s: When android.control.afTrigger is set "
1540                   "in the request, afTriggerId needs to be set as well",
1541                   __FUNCTION__);
1542             return BAD_VALUE;
1543         }
1544 
1545         mAfTriggerId = e.data.i32[0];
1546 
1547         ALOGV("%s: AF trigger set to 0x%x", __FUNCTION__, afTrigger);
1548         ALOGV("%s: AF trigger ID set to 0x%x", __FUNCTION__, mAfTriggerId);
1549         ALOGV("%s: AF mode is 0x%x", __FUNCTION__, afMode);
1550     } else {
1551         afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
1552     }
1553 
1554     switch (afMode) {
1555         case ANDROID_CONTROL_AF_MODE_OFF:
1556             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1557             return OK;
1558         case ANDROID_CONTROL_AF_MODE_AUTO:
1559         case ANDROID_CONTROL_AF_MODE_MACRO:
1560         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
1561         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
1562             if (!mFacingBack) {
1563                 ALOGE("%s: Front camera doesn't support AF mode %d",
1564                         __FUNCTION__, afMode);
1565                 return BAD_VALUE;
1566             }
1567             // OK, transitions are handled below
1568             break;
1569         default:
1570             ALOGE("%s: Emulator doesn't support AF mode %d",
1571                     __FUNCTION__, afMode);
1572             return BAD_VALUE;
1573     }
1574 
1575     bool afModeChanged = mAfMode != afMode;
1576     mAfMode = afMode;
1577 
1578     /**
1579      * Simulate AF triggers. Transition at most 1 state per frame.
1580      * - Focusing succeeds ~2/3 of the time (otherwise NOT_FOCUSED_LOCKED).
1581      */
1582 
1583     bool afTriggerStart = false;
1584     bool afTriggerCancel = false;
1585     switch (afTrigger) {
1586         case ANDROID_CONTROL_AF_TRIGGER_IDLE:
1587             break;
1588         case ANDROID_CONTROL_AF_TRIGGER_START:
1589             afTriggerStart = true;
1590             break;
1591         case ANDROID_CONTROL_AF_TRIGGER_CANCEL:
1592             afTriggerCancel = true;
1593             // Cancel trigger always transitions into INACTIVE
1594             mAfState = ANDROID_CONTROL_AF_STATE_INACTIVE;
1595 
1596             ALOGV("%s: AF State transition to STATE_INACTIVE", __FUNCTION__);
1597 
1598             // Stay in 'inactive' until at least next frame
1599             return OK;
1600         default:
1601             ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, afTrigger);
1602             return BAD_VALUE;
1603     }
1604 
1605     // If we get here, we're in either an auto-focus (AUTO/MACRO) or a
1606     // continuous-focus mode; all other modes have already returned above.
1607 
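    /*
     * Summary of the per-frame transitions implemented below:
     *  - INACTIVE + TRIGGER_START: AUTO/MACRO -> ACTIVE_SCAN,
     *    continuous modes -> NOT_FOCUSED_LOCKED.
     *  - INACTIVE, no trigger: continuous modes -> PASSIVE_SCAN (unless the
     *    AF mode just changed, which costs one extra frame in INACTIVE).
     *  - PASSIVE_SCAN / PASSIVE_FOCUSED + TRIGGER_START:
     *    -> FOCUSED_LOCKED (~2/3) or NOT_FOCUSED_LOCKED (~1/3).
     *  - PASSIVE_SCAN, no trigger: -> PASSIVE_FOCUSED with 1/3 chance/frame.
     *  - ACTIVE_SCAN: completes instantly into FOCUSED_LOCKED or
     *    NOT_FOCUSED_LOCKED.
     *  - FOCUSED_LOCKED / NOT_FOCUSED_LOCKED + TRIGGER_START: AUTO/MACRO
     *    restart an ACTIVE_SCAN; continuous modes ignore the trigger.
     * TRIGGER_CANCEL was handled above and always resets to INACTIVE.
     */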
1608     int oldAfState = mAfState;
1609     switch (mAfState) {
1610         case ANDROID_CONTROL_AF_STATE_INACTIVE:
1611             if (afTriggerStart) {
1612                 switch (afMode) {
1613                     case ANDROID_CONTROL_AF_MODE_AUTO:
1614                         // fall-through
1615                     case ANDROID_CONTROL_AF_MODE_MACRO:
1616                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
1617                         break;
1618                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
1619                         // fall-through
1620                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
1621                         mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
1622                         break;
1623                 }
1624             } else {
1625                 // At least one frame stays in INACTIVE
1626                 if (!afModeChanged) {
1627                     switch (afMode) {
1628                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
1629                             // fall-through
1630                         case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
1631                             mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN;
1632                             break;
1633                     }
1634                 }
1635             }
1636             break;
1637         case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
1638             /**
1639              * When the AF trigger is activated, the algorithm should finish
1640              * its PASSIVE_SCAN if active, and then transition into
1641              * FOCUSED_LOCKED or NOT_FOCUSED_LOCKED as appropriate
1642              */
1643             if (afTriggerStart) {
1644                 // Randomly transition to focused or not focused
1645                 if (rand() % 3) {
1646                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
1647                 } else {
1648                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
1649                 }
1650             }
1651             /**
1652              * When the AF trigger is not involved, the AF algorithm should
1653              * start in INACTIVE state, and then transition into PASSIVE_SCAN
1654              * and PASSIVE_FOCUSED states
1655              */
1656             else if (!afTriggerCancel) {
1657                // Randomly transition to passive focus
1658                 if (rand() % 3 == 0) {
1659                     mAfState = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED;
1660                 }
1661             }
1662 
1663             break;
1664         case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
1665             if (afTriggerStart) {
1666                 // Randomly transition to focused or not focused
1667                 if (rand() % 3) {
1668                     mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
1669                 } else {
1670                     mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
1671                 }
1672             }
1673             // TODO: initiate passive scan (PASSIVE_SCAN)
1674             break;
1675         case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
1676             // Simulate AF sweep completing instantaneously
1677 
1678             // Randomly transition to focused or not focused
1679             if (rand() % 3) {
1680                 mAfState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
1681             } else {
1682                 mAfState = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
1683             }
1684             break;
1685         case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
1686             if (afTriggerStart) {
1687                 switch (afMode) {
1688                     case ANDROID_CONTROL_AF_MODE_AUTO:
1689                         // fall-through
1690                     case ANDROID_CONTROL_AF_MODE_MACRO:
1691                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
1692                         break;
1693                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
1694                         // fall-through
1695                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
1696                         // continuous autofocus => trigger start has no effect
1697                         break;
1698                 }
1699             }
1700             break;
1701         case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
1702             if (afTriggerStart) {
1703                 switch (afMode) {
1704                     case ANDROID_CONTROL_AF_MODE_AUTO:
1705                         // fall-through
1706                     case ANDROID_CONTROL_AF_MODE_MACRO:
1707                         mAfState = ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN;
1708                         break;
1709                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
1710                         // fall-through
1711                     case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
1712                         // continuous autofocus => trigger start has no effect
1713                         break;
1714                 }
1715             }
1716             break;
1717         default:
1718             ALOGE("%s: Bad af state %d", __FUNCTION__, mAfState);
1719     }
1720 
1721     {
1722         char afStateString[100] = {0,};
1723         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
1724                 oldAfState,
1725                 afStateString,
1726                 sizeof(afStateString));
1727 
1728         char afNewStateString[100] = {0,};
1729         camera_metadata_enum_snprint(ANDROID_CONTROL_AF_STATE,
1730                 mAfState,
1731                 afNewStateString,
1732                 sizeof(afNewStateString));
1733         ALOGVV("%s: AF state transitioned from %s to %s",
1734               __FUNCTION__, afStateString, afNewStateString);
1735     }
1736 
1737 
1738     return OK;
1739 }
1740 
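/*
 * Minimal sketch of how a capture request drives the AF machinery above
 * (illustrative only; the trigger ID value is a hypothetical choice):
 *
 *   CameraMetadata request;
 *   uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO;
 *   uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_START;
 *   int32_t afTriggerId = 1;  // hypothetical client-chosen ID
 *   request.update(ANDROID_CONTROL_AF_MODE, &afMode, 1);
 *   request.update(ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);
 *   request.update(ANDROID_CONTROL_AF_TRIGGER_ID, &afTriggerId, 1);
 *
 * The resulting ANDROID_CONTROL_AF_STATE is published back to the framework
 * by update3A() below.
 */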
1741 status_t EmulatedFakeCamera3::doFakeAWB(CameraMetadata &settings) {
1742     camera_metadata_entry e;
1743 
1744     e = settings.find(ANDROID_CONTROL_AWB_MODE);
1745     if (e.count == 0) {
1746         ALOGE("%s: No AWB mode entry!", __FUNCTION__);
1747         return BAD_VALUE;
1748     }
1749     uint8_t awbMode = e.data.u8[0];
1750 
1751     // TODO: Add white balance simulation
1752 
1753     switch (awbMode) {
1754         case ANDROID_CONTROL_AWB_MODE_OFF:
1755             mAwbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
1756             return OK;
1757         case ANDROID_CONTROL_AWB_MODE_AUTO:
1758         case ANDROID_CONTROL_AWB_MODE_INCANDESCENT:
1759         case ANDROID_CONTROL_AWB_MODE_FLUORESCENT:
1760         case ANDROID_CONTROL_AWB_MODE_DAYLIGHT:
1761         case ANDROID_CONTROL_AWB_MODE_SHADE:
1762             // OK
1763             break;
1764         default:
1765             ALOGE("%s: Emulator doesn't support AWB mode %d",
1766                     __FUNCTION__, awbMode);
1767             return BAD_VALUE;
1768     }
1769 
1770     return OK;
1771 }
1772 
1773 
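/*
 * update3A() publishes the synthetic 3A outputs into the request settings so
 * they appear in the capture result: the simulated exposure time and
 * sensitivity (only while AE is active), the AE/AF/AWB states, and the last
 * seen precapture/AF trigger IDs.
 */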
1774 void EmulatedFakeCamera3::update3A(CameraMetadata &settings) {
1775     if (mAeState != ANDROID_CONTROL_AE_STATE_INACTIVE) {
1776         settings.update(ANDROID_SENSOR_EXPOSURE_TIME,
1777                 &mAeCurrentExposureTime, 1);
1778         settings.update(ANDROID_SENSOR_SENSITIVITY,
1779                 &mAeCurrentSensitivity, 1);
1780     }
1781 
1782     settings.update(ANDROID_CONTROL_AE_STATE,
1783             &mAeState, 1);
1784     settings.update(ANDROID_CONTROL_AF_STATE,
1785             &mAfState, 1);
1786     settings.update(ANDROID_CONTROL_AWB_STATE,
1787             &mAwbState, 1);
1788     /**
1789      * TODO: Trigger IDs need a think-through
1790      */
1791     settings.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
1792             &mAeTriggerId, 1);
1793     settings.update(ANDROID_CONTROL_AF_TRIGGER_ID,
1794             &mAfTriggerId, 1);
1795 }
1796 
1797 void EmulatedFakeCamera3::signalReadoutIdle() {
1798     Mutex::Autolock l(mLock);
1799     // Need to check isIdle again because waiting on mLock may have allowed
1800     // something to be placed in the in-flight queue.
1801     if (mStatus == STATUS_ACTIVE && mReadoutThread->isIdle()) {
1802         ALOGV("Now idle");
1803         mStatus = STATUS_READY;
1804     }
1805 }
1806 
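/*
 * Sensor event callback. EXPOSURE_START is forwarded to the framework as a
 * CAMERA3_MSG_SHUTTER notify carrying the frame number and the
 * start-of-exposure timestamp; any other event is unexpected here.
 */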
1807 void EmulatedFakeCamera3::onSensorEvent(uint32_t frameNumber, Event e,
1808         nsecs_t timestamp) {
1809     switch(e) {
1810         case Sensor::SensorListener::EXPOSURE_START: {
1811             ALOGVV("%s: Frame %d: Sensor started exposure at %" PRId64,
1812                     __FUNCTION__, frameNumber, timestamp);
1813             // Trigger shutter notify to framework
1814             camera3_notify_msg_t msg;
1815             msg.type = CAMERA3_MSG_SHUTTER;
1816             msg.message.shutter.frame_number = frameNumber;
1817             msg.message.shutter.timestamp = timestamp;
1818             sendNotify(&msg);
1819             break;
1820         }
1821         default:
1822             ALOGW("%s: Unexpected sensor event %d at %" PRId64, __FUNCTION__,
1823                     e, timestamp);
1824             break;
1825     }
1826 }
1827 
1828 EmulatedFakeCamera3::ReadoutThread::ReadoutThread(EmulatedFakeCamera3 *parent) :
1829         mParent(parent), mJpegWaiting(false) {
1830 }
1831 
1832 EmulatedFakeCamera3::ReadoutThread::~ReadoutThread() {
1833     for (List<Request>::iterator i = mInFlightQueue.begin();
1834          i != mInFlightQueue.end(); i++) {
1835         delete i->buffers;
1836         delete i->sensorBuffers;
1837     }
1838 }
1839 
1840 void EmulatedFakeCamera3::ReadoutThread::queueCaptureRequest(const Request &r) {
1841     Mutex::Autolock l(mLock);
1842 
1843     mInFlightQueue.push_back(r);
1844     mInFlightSignal.signal();
1845 }
1846 
1847 bool EmulatedFakeCamera3::ReadoutThread::isIdle() {
1848     Mutex::Autolock l(mLock);
1849     return mInFlightQueue.empty() && !mThreadActive;
1850 }
1851 
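/*
 * Back-pressure for the request path: waitForReadout() blocks while the
 * in-flight queue is at kMaxQueueSize, polling every kWaitPerLoop, and gives
 * up with TIMED_OUT after kMaxWaitLoops iterations.
 */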
1852 status_t EmulatedFakeCamera3::ReadoutThread::waitForReadout() {
1853     status_t res;
1854     Mutex::Autolock l(mLock);
1855     int loopCount = 0;
1856     while (mInFlightQueue.size() >= kMaxQueueSize) {
1857         res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
1858         if (res != OK && res != TIMED_OUT) {
1859             ALOGE("%s: Error waiting for in-flight queue to shrink",
1860                     __FUNCTION__);
1861             return INVALID_OPERATION;
1862         }
1863         if (loopCount == kMaxWaitLoops) {
1864             ALOGE("%s: Timed out waiting for in-flight queue to shrink",
1865                     __FUNCTION__);
1866             return TIMED_OUT;
1867         }
1868         loopCount++;
1869     }
1870     return OK;
1871 }
1872 
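/*
 * One iteration of the readout pipeline:
 *  1. Pull the next request from the in-flight queue (or keep waiting).
 *  2. Wait for the sensor to deliver the corresponding frame.
 *  3. Hand any BLOB (JPEG) buffer to the asynchronous JpegCompressor; it is
 *     returned later through onJpegDone() as a separate capture result.
 *  4. Return all remaining buffers plus the final metadata to the framework
 *     via sendCaptureResult().
 */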
1873 bool EmulatedFakeCamera3::ReadoutThread::threadLoop() {
1874     status_t res;
1875 
1876     ALOGVV("%s: ReadoutThread waiting for request", __FUNCTION__);
1877 
1878     // First wait for a request from the in-flight queue
1879 
1880     if (mCurrentRequest.settings.isEmpty()) {
1881         Mutex::Autolock l(mLock);
1882         if (mInFlightQueue.empty()) {
1883             res = mInFlightSignal.waitRelative(mLock, kWaitPerLoop);
1884             if (res == TIMED_OUT) {
1885                 ALOGVV("%s: ReadoutThread: Timed out waiting for request",
1886                         __FUNCTION__);
1887                 return true;
1888             } else if (res != NO_ERROR) {
1889                 ALOGE("%s: Error waiting for capture requests: %d",
1890                         __FUNCTION__, res);
1891                 return false;
1892             }
1893         }
1894         mCurrentRequest.frameNumber = mInFlightQueue.begin()->frameNumber;
1895         mCurrentRequest.settings.acquire(mInFlightQueue.begin()->settings);
1896         mCurrentRequest.buffers = mInFlightQueue.begin()->buffers;
1897         mCurrentRequest.sensorBuffers = mInFlightQueue.begin()->sensorBuffers;
1898         mInFlightQueue.erase(mInFlightQueue.begin());
1899         mInFlightSignal.signal();
1900         mThreadActive = true;
1901         ALOGVV("%s: Beginning readout of frame %d", __FUNCTION__,
1902                 mCurrentRequest.frameNumber);
1903     }
1904 
1905     // Then wait for it to be delivered from the sensor
1906     ALOGVV("%s: ReadoutThread: Wait for frame to be delivered from sensor",
1907             __FUNCTION__);
1908 
1909     nsecs_t captureTime;
1910     bool gotFrame =
1911             mParent->mSensor->waitForNewFrame(kWaitPerLoop, &captureTime);
1912     if (!gotFrame) {
1913         ALOGVV("%s: ReadoutThread: Timed out waiting for sensor frame",
1914                 __FUNCTION__);
1915         return true;
1916     }
1917 
1918     ALOGVV("Sensor done with readout for frame %d, captured at %" PRId64,
1919             mCurrentRequest.frameNumber, captureTime);
1920 
1921     // Check if we need to JPEG encode a buffer, and send it for async
1922     // compression if so. Otherwise prepare the buffer for return.
1923     bool needJpeg = false;
1924     HalBufferVector::iterator buf = mCurrentRequest.buffers->begin();
1925     while (buf != mCurrentRequest.buffers->end()) {
1926         bool goodBuffer = true;
1927         if (buf->stream->format ==
1928                 HAL_PIXEL_FORMAT_BLOB) {
1929             Mutex::Autolock jl(mJpegLock);
1930             if (mJpegWaiting) {
1931                 // This shouldn't happen, because processCaptureRequest
1932                 // should stall until the JPEG compressor is free.
1933                 ALOGE("%s: Already processing a JPEG!", __FUNCTION__);
1934                 goodBuffer = false;
1935             }
1936             if (goodBuffer) {
1937                 // Compressor takes ownership of sensorBuffers here
1938                 res = mParent->mJpegCompressor->start(mCurrentRequest.sensorBuffers,
1939                         this);
1940                 goodBuffer = (res == OK);
1941             }
1942             if (goodBuffer) {
1943                 needJpeg = true;
1944 
1945                 mJpegHalBuffer = *buf;
1946                 mJpegFrameNumber = mCurrentRequest.frameNumber;
1947                 mJpegWaiting = true;
1948 
1949                 mCurrentRequest.sensorBuffers = NULL;
1950                 buf = mCurrentRequest.buffers->erase(buf);
1951 
1952                 continue;
1953             }
1954             ALOGE("%s: Error compressing output buffer: %s (%d)",
1955                         __FUNCTION__, strerror(-res), res);
1956             // fallthrough for cleanup
1957         }
1958         GraphicBufferMapper::get().unlock(*(buf->buffer));
1959 
1960         buf->status = goodBuffer ? CAMERA3_BUFFER_STATUS_OK :
1961                 CAMERA3_BUFFER_STATUS_ERROR;
1962         buf->acquire_fence = -1;
1963         buf->release_fence = -1;
1964 
1965         ++buf;
1966     } // end while
1967 
1968     // Construct result for all completed buffers and results
1969 
1970     camera3_capture_result result;
1971 
1972     mCurrentRequest.settings.update(ANDROID_SENSOR_TIMESTAMP,
1973             &captureTime, 1);
1974 
1975     result.frame_number = mCurrentRequest.frameNumber;
1976     result.result = mCurrentRequest.settings.getAndLock();
1977     result.num_output_buffers = mCurrentRequest.buffers->size();
1978     result.output_buffers = mCurrentRequest.buffers->array();
1979 
1980     // Go idle if queue is empty, before sending result
1981     bool signalIdle = false;
1982     {
1983         Mutex::Autolock l(mLock);
1984         if (mInFlightQueue.empty()) {
1985             mThreadActive = false;
1986             signalIdle = true;
1987         }
1988     }
1989     if (signalIdle) mParent->signalReadoutIdle();
1990 
1991     // Send it off to the framework
1992     ALOGVV("%s: ReadoutThread: Send result to framework",
1993             __FUNCTION__);
1994     mParent->sendCaptureResult(&result);
1995 
1996     // Clean up
1997     mCurrentRequest.settings.unlock(result.result);
1998 
1999     delete mCurrentRequest.buffers;
2000     mCurrentRequest.buffers = NULL;
2001     if (!needJpeg) {
2002         delete mCurrentRequest.sensorBuffers;
2003         mCurrentRequest.sensorBuffers = NULL;
2004     }
2005     mCurrentRequest.settings.clear();
2006 
2007     return true;
2008 }
2009 
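/*
 * JpegCompressor listener callback: returns the BLOB buffer saved in
 * mJpegHalBuffer to the framework as a second, metadata-less capture result
 * (result.result == NULL) for the frame recorded in mJpegFrameNumber.
 */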
2010 void EmulatedFakeCamera3::ReadoutThread::onJpegDone(
2011         const StreamBuffer &jpegBuffer, bool success) {
2012     Mutex::Autolock jl(mJpegLock);
2013 
2014     GraphicBufferMapper::get().unlock(*(jpegBuffer.buffer));
2015 
2016     mJpegHalBuffer.status = success ?
2017             CAMERA3_BUFFER_STATUS_OK : CAMERA3_BUFFER_STATUS_ERROR;
2018     mJpegHalBuffer.acquire_fence = -1;
2019     mJpegHalBuffer.release_fence = -1;
2020     mJpegWaiting = false;
2021 
2022     camera3_capture_result result;
2023     result.frame_number = mJpegFrameNumber;
2024     result.result = NULL;
2025     result.num_output_buffers = 1;
2026     result.output_buffers = &mJpegHalBuffer;
2027 
2028     if (!success) {
2029         ALOGE("%s: Compression failure, returning error state buffer to"
2030                 " framework", __FUNCTION__);
2031     } else {
2032         ALOGV("%s: Compression complete, returning buffer to framework",
2033                 __FUNCTION__);
2034     }
2035 
2036     mParent->sendCaptureResult(&result);
2037 }
2038 
2039 void EmulatedFakeCamera3::ReadoutThread::onJpegInputDone(
2040         const StreamBuffer &inputBuffer) {
2041     // Should never get here, since the input buffer has to be returned
2042     // by end of processCaptureRequest
2043     ALOGE("%s: Unexpected input buffer from JPEG compressor!", __FUNCTION__);
2044 }
2045 
2046 
2047 }  // namespace android
2048